From 140e0831a6907737c2a54d9160d2b82938d519df Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Tue, 24 Sep 2024 19:53:55 -0500
Subject: [PATCH] chore: 🐝 Update SDK - Generate 0.52.1 (#105)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: speakeasybot
---
 .speakeasy/gen.lock | 451 ++++++++++++------
 .speakeasy/workflow.lock | 8 +-
 README.md | 6 +
 RELEASES.md | 12 +-
 docs/models/apiendpoint.md | 17 +
 docs/models/apikeysecret.md | 10 +-
 docs/models/apiserver.md | 9 +
 docs/models/appcues.md | 8 +
 docs/models/appfigures.md | 8 +
 docs/models/authenticationmethod.md | 6 +
 docs/models/authorizationtype.md | 4 +-
 docs/models/avroformat.md | 2 +-
 docs/models/baseurlprefix.md | 12 +
 docs/models/basic.md | 9 +
 ...rces3schemasformatfiletype.md => bitly.md} | 4 +-
 docs/models/brevo.md | 8 +
 docs/models/buildkite.md | 8 +
 docs/models/buzzsprout.md | 8 +
 docs/models/canny.md | 8 +
 docs/models/chameleon.md | 8 +
 docs/models/cimis.md | 8 +
 docs/models/continuousfeed.md | 14 -
 docs/models/csvformat.md | 1 -
 docs/models/destinationclickhouse.md | 1 +
 docs/models/destinationconfiguration.md | 12 +-
 docs/models/destinationdevnull.md | 9 -
 docs/models/destinationelasticsearch.md | 1 +
 docs/models/destinationelasticsearchmethod.md | 6 +-
 .../destinationelasticsearchnotunnel.md | 8 +
 ...tionelasticsearchpasswordauthentication.md | 12 +
 ...searchschemasauthenticationmethodmethod.md | 8 +
 .../destinationelasticsearchschemasmethod.md | 6 +-
 ...inationelasticsearchschemastunnelmethod.md | 10 +
 ...icsearchschemastunnelmethodtunnelmethod.md | 10 +
 ...nationelasticsearchsshkeyauthentication.md | 12 +
 ...destinationelasticsearchsshtunnelmethod.md | 25 +
 .../destinationelasticsearchtunnelmethod.md | 10 +
 docs/models/destinationmongodbnone.md | 10 +
 .../destinationmssqlschemassslmethod.md | 6 +-
 ...stinationmssqlschemassslmethodsslmethod.md | 8 +
 docs/models/destinationmssqlsslmethod.md | 6 +-
 docs/models/destinationmysql.md | 1 +
 docs/models/destinationoracle.md | 1 +
 .../destinationoracleencryptionmethod.md | 8 +
 ...estinationoracleschemasencryptionmethod.md | 8 +
 docs/models/destinationoracleunencrypted.md | 10 +
 ...singleschema.md => destinationpgvector.md} | 22 +-
 docs/models/destinationpgvectorazureopenai.md | 13 +
 .../destinationpgvectorbymarkdownheader.md | 11 +
 ...estinationpgvectorbyprogramminglanguage.md | 11 +
 docs/models/destinationpgvectorbyseparator.md | 12 +
 docs/models/destinationpgvectorcohere.md | 11 +
 docs/models/destinationpgvectorcredentials.md | 8 +
 docs/models/destinationpgvectorembedding.md | 37 ++
 docs/models/destinationpgvectorfake.md | 10 +
 ...tionpgvectorfieldnamemappingconfigmodel.md | 9 +
 docs/models/destinationpgvectorlanguage.md | 25 +
 docs/models/destinationpgvectormode.md | 8 +
 docs/models/destinationpgvectoropenai.md | 11 +
 .../destinationpgvectoropenaicompatible.md | 14 +
 ...stinationpgvectorprocessingconfigmodel.md} | 25 +-
 ...npgvectorschemasembeddingembedding5mode.md | 8 +
 ...onpgvectorschemasembeddingembeddingmode.md | 8 +
 ...destinationpgvectorschemasembeddingmode.md | 8 +
 docs/models/destinationpgvectorschemasmode.md | 8 +
 ...estinationpgvectorschemasprocessingmode.md | 8 +
 ...vectorschemasprocessingtextsplittermode.md | 8 +
 ...sprocessingtextsplittertextsplittermode.md | 8 +
 .../models/destinationpgvectortextsplitter.md | 25 +
 docs/models/destinationpostgres.md | 1 +
 docs/models/dimensionsfilter.md | 4 +-
 docs/models/e2etestcloud.md | 8 -
 .../models/encryptedtrustservercertificate.md | 6 +-
 docs/models/encryptedverifycertificate.md | 2 +-
 docs/models/encryption.md | 8 +-
 docs/models/encryptionalgorithm.md | 2 +-
 docs/models/encryptionmethod.md | 6 +-
 docs/models/enterprise.md | 9 +
 docs/models/expression.md | 8 +-
 docs/models/ezofficeinventory.md | 8 +
 docs/models/filebasedstreamconfig.md | 2 -
 docs/models/filetype.md | 6 +-
 docs/models/filter_.md | 14 +-
 docs/models/front.md | 8 +
 .../models/{zendesksell.md => googletasks.md} | 4 +-
 docs/models/groupby.md | 13 +
 docs/models/guru.md | 8 +
 .../{testdestinationtype.md => height.md} | 4 +-
 docs/models/inferencetype.md | 11 -
 docs/models/issuesstreamexpandwith.md | 10 -
 docs/models/jotform.md | 8 +
 docs/models/jsonl.md | 13 -
 docs/models/localfilesystemlimited.md | 8 +
 docs/models/metricsfilter.md | 4 +-
 docs/models/mockcatalog.md | 17 -
 docs/models/multischema.md | 11 -
 docs/models/nativenetworkencryptionnne.md | 8 +-
 docs/models/nonet.md | 8 +-
 docs/models/nylas.md | 8 +
 docs/models/parquet.md | 13 -
 docs/models/{devnull.md => pgvector.md} | 4 +-
 docs/models/picqer.md | 8 +
 docs/models/piwik.md | 8 +
 docs/models/postgresconnection.md | 15 +
 ...rcee2etestcloudtype.md => productboard.md} | 4 +-
 docs/models/productive.md | 8 +
 docs/models/reportname.md | 10 +-
 docs/models/s3amazonwebservices.md | 17 +-
 docs/models/sevenshifts.md | 8 +
 docs/models/shortcut.md | 8 +
 docs/models/silent.md | 8 -
 docs/models/source7shifts.md | 10 +
 docs/models/sourceappcues.md | 12 +
 docs/models/sourceappfigures.md | 12 +
 docs/models/sourceasana.md | 11 +-
 ...orageschemasstreamsformatformatfiletype.md | 8 -
 docs/models/sourcebitly.md | 11 +
 docs/models/sourcebrevo.md | 10 +
 docs/models/sourcebuildkite.md | 10 +
 docs/models/sourcebuzzsprout.md | 11 +
 docs/models/sourcecanny.md | 9 +
 docs/models/sourcechameleon.md | 13 +
 docs/models/sourcecimis.md | 16 +
 docs/models/sourceclickhouse.md | 1 +
 docs/models/sourceconfiguration.md | 162 ++++++-
 docs/models/sourcee2etestcloud.md | 11 -
 docs/models/sourcee2etestcloudschemastype.md | 8 -
 docs/models/sourceezofficeinventory.md | 11 +
 docs/models/sourcefacebookmarketing.md | 3 -
 docs/models/sourcefiles3amazonwebservices.md | 10 -
 ...eschemasproviderstorageprovider8storage.md | 10 +
 docs/models/sourcefront.md | 11 +
 docs/models/sourcegcscsvformat.md | 1 -
 docs/models/sourcegcsfilebasedstreamconfig.md | 2 -
 docs/models/sourcegcsinferencetype.md | 11 -
 docs/models/sourcegithub.md | 2 -
 docs/models/sourcegitlab.md | 2 -
 .../sourcegoogleanalyticsdataapiexpression.md | 8 +-
 .../sourcegoogleanalyticsdataapifilter.md | 12 +-
 ...ydimensionfilterdimensionsfilter3filter.md | 29 ++
 ...aydimensionfilterdimensionsfilterfilter.md | 16 +-
 ...customreportsarraydimensionfilterfilter.md | 16 +-
 ...aapischemascustomreportsarrayexpression.md | 8 +-
 ...sdataapischemascustomreportsarrayfilter.md | 16 +-
 ...mascustomreportsarraymetricfilterfilter.md | 16 +-
 ...sarraymetricfiltermetricsfilter1filter.md} | 18 +-
 ...googleanalyticsdataapischemasexpression.md | 8 +-
 ...urcegoogleanalyticsdataapischemasfilter.md | 31 +-
 .../sourcegoogledrivefilebasedstreamconfig.md | 1 -
 docs/models/sourcegooglesearchconsole.md | 1 -
 docs/models/sourcegoogletasks.md | 11 +
 docs/models/sourceguru.md | 13 +
 docs/models/sourceharvest.md | 1 -
 docs/models/sourceheight.md | 11 +
 docs/models/sourceinstagram.md | 2 -
 docs/models/sourcejira.md | 4 -
 docs/models/sourcejotform.md | 12 +
 docs/models/sourcejotformapiendpoint.md | 8 +
 .../models/sourcejotformschemasapiendpoint.md | 8 +
 docs/models/sourcekyve.md | 2 -
 docs/models/sourcelinkedinads.md | 1 +
 docs/models/sourcemailchimp.md | 1 -
 ...emicrosoftonedrivefilebasedstreamconfig.md | 1 -
 ...icrosoftsharepointfilebasedstreamconfig.md | 1 -
 docs/models/sourcemssqlsslmethod.md | 4 +-
 docs/models/sourcemssqlunencrypted.md | 10 +
 docs/models/sourcemysql.md | 1 +
 docs/models/sourcenylas.md | 12 +
 docs/models/sourceoracle.md | 2 +-
 docs/models/sourceoracleencryption.md | 25 +
 .../models/sourceoracleencryptionalgorithm.md | 12 +
 docs/models/sourceoracleencryptionmethod.md | 6 +-
 .../sourceoraclenativenetworkencryptionnne.md | 11 +
 ...oracleschemasencryptionencryptionmethod.md | 8 +
 .../sourceoracleschemasencryptionmethod.md | 8 +
 ...urceoracletlsencryptedverifycertificate.md | 11 +
 .../{avro.md => sourceoracleunencrypted.md} | 6 +-
 docs/models/sourcepicqer.md | 12 +
 docs/models/sourcepiwik.md | 11 +
 docs/models/sourceproductboard.md | 10 +
 docs/models/sourceproductive.md | 10 +
 docs/models/sources3.md | 27 +-
 docs/models/sources3avroformat.md | 2 +-
 docs/models/sources3csvformat.md | 3 +-
 docs/models/sources3filebasedstreamconfig.md | 2 -
 docs/models/sources3fileformat.md | 31 --
 docs/models/sources3filetype.md | 6 +-
 docs/models/sources3inferencetype.md | 11 -
 docs/models/sources3jsonlformat.md | 6 +-
 docs/models/sources3parquetformat.md | 2 +-
 docs/models/sources3schemasfiletype.md | 6 +-
 docs/models/sources3schemasstreamsfiletype.md | 6 +-
 .../sources3schemasstreamsformatfiletype.md | 6 +-
 ...ces3schemasstreamsformatformat4filetype.md | 8 -
 ...ces3schemasstreamsformatformat5filetype.md | 8 -
 ...rces3schemasstreamsformatformatfiletype.md | 6 +-
 .../sources3unstructureddocumentformat.md | 2 +-
 docs/models/sourcesenseforce.md | 15 +-
 docs/models/sourcesftpbulkcsvformat.md | 1 -
 .../sourcesftpbulkfilebasedstreamconfig.md | 2 -
 docs/models/sourcesftpbulkinferencetype.md | 11 -
 docs/models/sourceshortcut.md | 11 +
 docs/models/sourcesmartsheets.md | 13 +-
 docs/models/sourcesurvicate.md | 10 +
 docs/models/sourceteamwork.md | 12 +
 docs/models/sourcewheniwork.md | 10 +
 docs/models/sourcezendesksell.md | 9 -
 docs/models/sourcezendesksupport.md | 1 -
 docs/models/sslmethod.md | 6 +
 docs/models/standalonemongodbinstance.md | 11 +-
 docs/models/storageprovider.md | 10 +-
 docs/models/survicate.md | 8 +
 docs/models/targetstype.md | 11 +
 docs/models/teamwork.md | 8 +
 docs/models/testdestination.md | 13 -
 docs/models/tlsencryptedverifycertificate.md | 2 +-
 docs/models/type.md | 8 -
 docs/models/unencrypted.md | 8 +-
 docs/models/unexpectedfieldbehavior.md | 12 -
 docs/models/unitofmeasure.md | 9 +
 docs/models/usernamepassword.md | 10 +-
 docs/models/wheniwork.md | 8 +
 docs/sdks/connections/README.md | 4 +-
 docs/sdks/destinations/README.md | 21 +-
 docs/sdks/permissions/README.md | 4 +-
 docs/sdks/sources/README.md | 45 +-
 docs/sdks/workspaces/README.md | 8 +-
 gen.yaml | 2 +-
 setup.py | 2 +-
 src/airbyte_api/models/__init__.py | 31 +-
 .../models/destination_clickhouse.py | 2 +
 .../models/destination_dev_null.py | 36 --
 .../models/destination_elasticsearch.py | 85 +++-
 src/airbyte_api/models/destination_mongodb.py | 6 +-
 src/airbyte_api/models/destination_mssql.py | 23 +-
 src/airbyte_api/models/destination_mysql.py | 2 +
 src/airbyte_api/models/destination_oracle.py | 54 +++
 .../models/destination_pgvector.py | 247 ++++++++++
 .../models/destination_postgres.py | 2 +
 .../models/destinationconfiguration.py | 4 +-
 src/airbyte_api/models/source_7shifts.py | 25 +
 .../models/source_amazon_seller_partner.py | 8 +
 src/airbyte_api/models/source_appcues.py | 27 ++
 src/airbyte_api/models/source_appfigures.py | 36 ++
 src/airbyte_api/models/source_asana.py | 2 -
 .../models/source_azure_blob_storage.py | 16 +-
 src/airbyte_api/models/source_bitly.py | 25 +
 src/airbyte_api/models/source_brevo.py | 24 +
 src/airbyte_api/models/source_buildkite.py | 24 +
 src/airbyte_api/models/source_buzzsprout.py | 26 +
 src/airbyte_api/models/source_canny.py | 22 +
 src/airbyte_api/models/source_chameleon.py | 36 ++
 src/airbyte_api/models/source_cimis.py | 42 ++
 src/airbyte_api/models/source_clickhouse.py | 2 +
 .../models/source_e2e_test_cloud.py | 71 ---
 .../models/source_ezofficeinventory.py | 28 ++
 .../models/source_facebook_marketing.py | 6 -
 src/airbyte_api/models/source_file.py | 18 +-
 src/airbyte_api/models/source_front.py | 26 +
 src/airbyte_api/models/source_gcs.py | 12 -
 src/airbyte_api/models/source_github.py | 4 -
 src/airbyte_api/models/source_gitlab.py | 4 -
 .../source_google_analytics_data_api.py | 32 +-
 src/airbyte_api/models/source_google_drive.py | 2 -
 .../models/source_google_search_console.py | 2 -
 src/airbyte_api/models/source_google_tasks.py | 26 +
 src/airbyte_api/models/source_guru.py | 29 ++
 src/airbyte_api/models/source_harvest.py | 2 -
 src/airbyte_api/models/source_height.py | 26 +
 src/airbyte_api/models/source_instagram.py | 4 -
 src/airbyte_api/models/source_jira.py | 14 -
 src/airbyte_api/models/source_jotform.py | 63 +++
 src/airbyte_api/models/source_kyve.py | 4 -
 src/airbyte_api/models/source_linkedin_ads.py | 2 +
 src/airbyte_api/models/source_mailchimp.py | 2 -
 .../models/source_microsoft_onedrive.py | 2 -
 .../models/source_microsoft_sharepoint.py | 2 -
 src/airbyte_api/models/source_mssql.py | 4 +-
 src/airbyte_api/models/source_mysql.py | 2 +
 src/airbyte_api/models/source_nylas.py | 31 ++
 src/airbyte_api/models/source_oracle.py | 35 +-
 src/airbyte_api/models/source_picqer.py | 27 ++
 src/airbyte_api/models/source_piwik.py | 24 +
 src/airbyte_api/models/source_productboard.py | 25 +
 src/airbyte_api/models/source_productive.py | 23 +
 src/airbyte_api/models/source_s3.py | 161 +------
 src/airbyte_api/models/source_senseforce.py | 4 +-
 src/airbyte_api/models/source_sftp_bulk.py | 12 -
 src/airbyte_api/models/source_shortcut.py | 26 +
 src/airbyte_api/models/source_smartsheets.py | 2 -
 src/airbyte_api/models/source_survicate.py | 24 +
 src/airbyte_api/models/source_teamwork.py | 27 ++
 src/airbyte_api/models/source_when_i_work.py | 24 +
 src/airbyte_api/models/source_zendesk_sell.py | 22 -
 .../models/source_zendesk_support.py | 2 -
 src/airbyte_api/models/sourceconfiguration.py | 29 +-
 src/airbyte_api/sdkconfiguration.py | 6 +-
 297 files changed, 3188 insertions(+), 1203 deletions(-)

 create mode 100644 docs/models/apiendpoint.md
 create mode 100644 docs/models/apiserver.md
 create mode 100644 docs/models/appcues.md
 create mode 100644 docs/models/appfigures.md
 create mode 100644 docs/models/baseurlprefix.md
 create mode 100644 docs/models/basic.md
 rename docs/models/{sources3schemasformatfiletype.md => bitly.md} (51%)
 create mode 100644 docs/models/brevo.md
 create mode 100644 docs/models/buildkite.md
 create mode 100644 docs/models/buzzsprout.md
 create mode 100644 docs/models/canny.md
 create mode 100644 docs/models/chameleon.md
 create mode 100644 docs/models/cimis.md
 delete mode 100644 docs/models/continuousfeed.md
 delete mode 100644 docs/models/destinationdevnull.md
 create mode 100644 docs/models/destinationelasticsearchnotunnel.md
 create mode 100644 docs/models/destinationelasticsearchpasswordauthentication.md
 create mode 100644 docs/models/destinationelasticsearchschemasauthenticationmethodmethod.md
 create mode 100644 docs/models/destinationelasticsearchschemastunnelmethod.md
 create mode 100644 docs/models/destinationelasticsearchschemastunnelmethodtunnelmethod.md
 create mode 100644 docs/models/destinationelasticsearchsshkeyauthentication.md
 create mode 100644 docs/models/destinationelasticsearchsshtunnelmethod.md
 create mode 100644 docs/models/destinationelasticsearchtunnelmethod.md
 create mode 100644 docs/models/destinationmongodbnone.md
 create mode 100644 docs/models/destinationmssqlschemassslmethodsslmethod.md
 create mode 100644 docs/models/destinationoracleencryptionmethod.md
 create mode 100644 docs/models/destinationoracleschemasencryptionmethod.md
 create mode 100644 docs/models/destinationoracleunencrypted.md
 rename docs/models/{singleschema.md => destinationpgvector.md} (54%)
 create mode 100644 docs/models/destinationpgvectorazureopenai.md
 create mode 100644 docs/models/destinationpgvectorbymarkdownheader.md
 create mode 100644 docs/models/destinationpgvectorbyprogramminglanguage.md
 create mode 100644 docs/models/destinationpgvectorbyseparator.md
 create mode 100644 docs/models/destinationpgvectorcohere.md
 create mode 100644 docs/models/destinationpgvectorcredentials.md
 create mode 100644 docs/models/destinationpgvectorembedding.md
 create mode 100644 docs/models/destinationpgvectorfake.md
 create mode 100644 docs/models/destinationpgvectorfieldnamemappingconfigmodel.md
 create mode 100644 docs/models/destinationpgvectorlanguage.md
 create mode 100644 docs/models/destinationpgvectormode.md
 create mode 100644 docs/models/destinationpgvectoropenai.md
 create mode 100644 docs/models/destinationpgvectoropenaicompatible.md
 rename docs/models/{csv.md => destinationpgvectorprocessingconfigmodel.md} (76%)
 create mode 100644 docs/models/destinationpgvectorschemasembeddingembedding5mode.md
 create mode 100644 docs/models/destinationpgvectorschemasembeddingembeddingmode.md
 create mode 100644 docs/models/destinationpgvectorschemasembeddingmode.md
 create mode 100644 docs/models/destinationpgvectorschemasmode.md
 create mode 100644 docs/models/destinationpgvectorschemasprocessingmode.md
 create mode 100644 docs/models/destinationpgvectorschemasprocessingtextsplittermode.md
 create mode 100644 docs/models/destinationpgvectorschemasprocessingtextsplittertextsplittermode.md
 create mode 100644 docs/models/destinationpgvectortextsplitter.md
 delete mode 100644 docs/models/e2etestcloud.md
 create mode 100644 docs/models/enterprise.md
 create mode 100644 docs/models/ezofficeinventory.md
 create mode 100644 docs/models/front.md
 rename docs/models/{zendesksell.md => googletasks.md} (63%)
 create mode 100644 docs/models/groupby.md
 create mode 100644 docs/models/guru.md
 rename docs/models/{testdestinationtype.md => height.md} (57%)
 delete mode 100644 docs/models/inferencetype.md
 delete mode 100644 docs/models/issuesstreamexpandwith.md
 create mode 100644 docs/models/jotform.md
 delete mode 100644 docs/models/jsonl.md
 create mode 100644 docs/models/localfilesystemlimited.md
 delete mode 100644 docs/models/mockcatalog.md
 delete mode 100644 docs/models/multischema.md
 create mode 100644 docs/models/nylas.md
 delete mode 100644 docs/models/parquet.md
 rename docs/models/{devnull.md => pgvector.md} (64%)
 create mode 100644 docs/models/picqer.md
 create mode 100644 docs/models/piwik.md
 create mode 100644 docs/models/postgresconnection.md
 rename docs/models/{sourcee2etestcloudtype.md => productboard.md} (58%)
 create mode 100644 docs/models/productive.md
 create mode 100644 docs/models/sevenshifts.md
 create mode 100644 docs/models/shortcut.md
 delete mode 100644 docs/models/silent.md
 create mode 100644 docs/models/source7shifts.md
 create mode 100644 docs/models/sourceappcues.md
 create mode 100644 docs/models/sourceappfigures.md
 delete mode 100644 docs/models/sourceazureblobstorageschemasstreamsformatformatfiletype.md
 create mode 100644 docs/models/sourcebitly.md
 create mode 100644 docs/models/sourcebrevo.md
 create mode 100644 docs/models/sourcebuildkite.md
 create mode 100644 docs/models/sourcebuzzsprout.md
 create mode 100644 docs/models/sourcecanny.md
 create mode 100644 docs/models/sourcechameleon.md
 create mode 100644 docs/models/sourcecimis.md
 delete mode 100644 docs/models/sourcee2etestcloud.md
 delete mode 100644 docs/models/sourcee2etestcloudschemastype.md
 create mode 100644 docs/models/sourceezofficeinventory.md
 delete mode 100644 docs/models/sourcefiles3amazonwebservices.md
 create mode 100644 docs/models/sourcefileschemasproviderstorageprovider8storage.md
 create mode 100644 docs/models/sourcefront.md
 delete mode 100644 docs/models/sourcegcsinferencetype.md
 create mode 100644 docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3filter.md
 rename docs/models/{sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md => sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1filter.md} (60%)
 create mode 100644 docs/models/sourcegoogletasks.md
 create mode 100644 docs/models/sourceguru.md
 create mode 100644 docs/models/sourceheight.md
 create mode 100644 docs/models/sourcejotform.md
 create mode 100644 docs/models/sourcejotformapiendpoint.md
 create mode 100644 docs/models/sourcejotformschemasapiendpoint.md
 create mode 100644 docs/models/sourcemssqlunencrypted.md
 create mode 100644 docs/models/sourcenylas.md
 create mode 100644 docs/models/sourceoracleencryption.md
 create mode 100644 docs/models/sourceoracleencryptionalgorithm.md
 create mode 100644 docs/models/sourceoraclenativenetworkencryptionnne.md
 create mode 100644 docs/models/sourceoracleschemasencryptionencryptionmethod.md
 create mode 100644 docs/models/sourceoracleschemasencryptionmethod.md
 create mode 100644 docs/models/sourceoracletlsencryptedverifycertificate.md
 rename docs/models/{avro.md => sourceoracleunencrypted.md} (71%)
 create mode 100644 docs/models/sourcepicqer.md
 create mode 100644 docs/models/sourcepiwik.md
 create mode 100644 docs/models/sourceproductboard.md
 create mode 100644 docs/models/sourceproductive.md
 delete mode 100644 docs/models/sources3fileformat.md
 delete mode 100644 docs/models/sources3inferencetype.md
 delete mode 100644 docs/models/sources3schemasstreamsformatformat4filetype.md
 delete mode 100644 docs/models/sources3schemasstreamsformatformat5filetype.md
 delete mode 100644 docs/models/sourcesftpbulkinferencetype.md
 create mode 100644 docs/models/sourceshortcut.md
 create mode 100644 docs/models/sourcesurvicate.md
 create mode 100644 docs/models/sourceteamwork.md
 create mode 100644 docs/models/sourcewheniwork.md
 delete mode 100644 docs/models/sourcezendesksell.md
 create mode 100644 docs/models/survicate.md
 create mode 100644 docs/models/targetstype.md
 create mode 100644 docs/models/teamwork.md
 delete mode 100644 docs/models/testdestination.md
 delete mode 100644 docs/models/type.md
 delete mode 100644 docs/models/unexpectedfieldbehavior.md
 create mode 100644 docs/models/unitofmeasure.md
 create mode 100644 docs/models/wheniwork.md
 delete mode 100644 src/airbyte_api/models/destination_dev_null.py
 create mode 100644 src/airbyte_api/models/destination_pgvector.py
 create mode 100644 src/airbyte_api/models/source_7shifts.py
 create mode 100644 src/airbyte_api/models/source_appcues.py
 create mode 100644 src/airbyte_api/models/source_appfigures.py
 create mode 100644 src/airbyte_api/models/source_bitly.py
 create mode 100644 src/airbyte_api/models/source_brevo.py
 create mode 100644 src/airbyte_api/models/source_buildkite.py
 create mode 100644 src/airbyte_api/models/source_buzzsprout.py
 create mode 100644 src/airbyte_api/models/source_canny.py
 create mode 100644 src/airbyte_api/models/source_chameleon.py
 create mode 100644 src/airbyte_api/models/source_cimis.py
 delete mode 100644 src/airbyte_api/models/source_e2e_test_cloud.py
 create mode 100644 src/airbyte_api/models/source_ezofficeinventory.py
 create mode 100644 src/airbyte_api/models/source_front.py
 create mode 100644 src/airbyte_api/models/source_google_tasks.py
 create mode 100644 src/airbyte_api/models/source_guru.py
 create mode 100644 src/airbyte_api/models/source_height.py
 create mode 100644 src/airbyte_api/models/source_jotform.py
 create mode 100644 src/airbyte_api/models/source_nylas.py
 create mode 100644 src/airbyte_api/models/source_picqer.py
 create mode 100644 src/airbyte_api/models/source_piwik.py
 create mode 100644 src/airbyte_api/models/source_productboard.py
 create mode 100644 src/airbyte_api/models/source_productive.py
 create mode 100644 src/airbyte_api/models/source_shortcut.py
 create mode 100644 src/airbyte_api/models/source_survicate.py
 create mode 100644 src/airbyte_api/models/source_teamwork.py
 create mode 100644 src/airbyte_api/models/source_when_i_work.py
 delete mode 100644 src/airbyte_api/models/source_zendesk_sell.py

diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock
index f3de962c..e797dd42 100755
--- a/.speakeasy/gen.lock
+++ b/.speakeasy/gen.lock
@@ -1,12 +1,12 @@
 lockVersion: 2.0.0
 id: 07961597-3730-4940-9fd0-35eb4118eab3
 management:
-  docChecksum: b43c86bb9d2242c34e3771d650a5173e
+  docChecksum: 1308fa7ec8504c6d50430974d4647f3f
   docVersion: 1.0.0
-  speakeasyVersion: 1.395.1
-  generationVersion: 2.415.0
-  releaseVersion: 0.52.0
-  configChecksum: ff78e7a1c972bbd2d2503bc59fcc7df2
+  speakeasyVersion: 1.402.12
+  generationVersion: 2.422.15
+  releaseVersion: 0.52.1
+  configChecksum: c0d4d67cf69009a4ccee8bae850f104e
   repoURL: https://github.com/airbytehq/airbyte-api-python-sdk.git
   repoSubDirectory: .
   installationURL: https://github.com/airbytehq/airbyte-api-python-sdk.git
@@ -16,7 +16,7 @@ features:
     additionalDependencies: 0.1.0
     additionalProperties: 0.1.0
     constsAndDefaults: 0.1.4
-    core: 4.8.7
+    core: 4.8.10
     globalSecurity: 2.83.7
     globalSecurityCallbacks: 0.1.0
     globalServerURLs: 2.82.2
@@ -113,13 +113,17 @@ generatedFiles:
 - docs/models/amplitude.md
 - docs/models/andgroup.md
 - docs/models/apiaccesstoken.md
+ - docs/models/apiendpoint.md
 - docs/models/apifydataset.md
 - docs/models/apikey.md
 - docs/models/apikeyauth.md
 - docs/models/apikeysecret.md
 - docs/models/apiparameterconfigmodel.md
 - docs/models/apipassword.md
+ - docs/models/apiserver.md
 - docs/models/apitoken.md
+ - docs/models/appcues.md
+ - docs/models/appfigures.md
 - docs/models/appfollow.md
 - docs/models/applications.md
 - docs/models/asana.md
@@ -159,7 +163,6 @@ generatedFiles:
 - docs/models/authorizationtype.md
 - docs/models/authtype.md
 - docs/models/autogenerated.md
- - docs/models/avro.md
 - docs/models/avroapacheavro.md
 - docs/models/avroformat.md
 - docs/models/awscloudtrail.md
@@ -176,27 +179,36 @@ generatedFiles:
 - docs/models/bamboohr.md
 - docs/models/basecamp.md
 - docs/models/baseurl.md
+ - docs/models/baseurlprefix.md
+ - docs/models/basic.md
 - docs/models/batchedstandardinserts.md
 - docs/models/betweenfilter.md
 - docs/models/bigquery.md
 - docs/models/bingads.md
+ - docs/models/bitly.md
 - docs/models/bothusernameandpasswordisrequiredforauthenticationrequest.md
 - docs/models/braintree.md
 - docs/models/braze.md
 - docs/models/breezyhr.md
+ - docs/models/brevo.md
+ - docs/models/buildkite.md
+ - docs/models/buzzsprout.md
 - docs/models/bymarkdownheader.md
 - docs/models/byprogramminglanguage.md
 - docs/models/byseparator.md
 - docs/models/bzip2.md
 - docs/models/cachetype.md
 - docs/models/calendly.md
+ - docs/models/canny.md
 - docs/models/capturemodeadvanced.md
 - docs/models/cart.md
 - docs/models/categories.md
 - docs/models/centralapirouter.md
+ - docs/models/chameleon.md
 - docs/models/chargebee.md
 - docs/models/chartmogul.md
 - docs/models/choosehowtopartitiondata.md
+ - docs/models/cimis.md
 - docs/models/clazar.md
 - docs/models/clickhouse.md
 - docs/models/clickupapi.md
@@ -230,7 +242,6 @@ generatedFiles:
 - docs/models/connectionsyncmodeenum.md
 - docs/models/connectiontype.md
 - docs/models/contenttype.md
- - docs/models/continuousfeed.md
 - docs/models/conversionreporttime.md
 - docs/models/convex.md
 - docs/models/country.md
@@ -238,7 +249,6 @@ generatedFiles:
 - docs/models/credentials.md
 - docs/models/credentialstitle.md
 - docs/models/credentialtype.md
- - docs/models/csv.md
 - docs/models/csvcommaseparatedvalues.md
 - docs/models/csvformat.md
 - docs/models/csvheaderdefinition.md
@@ -292,12 +302,19 @@ generatedFiles:
 - docs/models/destinationdatabricks.md
 - docs/models/destinationdatabricksauthtype.md
 - docs/models/destinationdatabricksschemasauthtype.md
- - docs/models/destinationdevnull.md
 - docs/models/destinationduckdb.md
 - docs/models/destinationdynamodb.md
 - docs/models/destinationelasticsearch.md
 - docs/models/destinationelasticsearchmethod.md
+ - docs/models/destinationelasticsearchnotunnel.md
+ - docs/models/destinationelasticsearchpasswordauthentication.md
+ - docs/models/destinationelasticsearchschemasauthenticationmethodmethod.md
 - docs/models/destinationelasticsearchschemasmethod.md
+ - docs/models/destinationelasticsearchschemastunnelmethod.md
+ - docs/models/destinationelasticsearchschemastunnelmethodtunnelmethod.md
+ - docs/models/destinationelasticsearchsshkeyauthentication.md
+ - docs/models/destinationelasticsearchsshtunnelmethod.md
+ - docs/models/destinationelasticsearchtunnelmethod.md
 - docs/models/destinationfirebolt.md
 - docs/models/destinationfireboltloadingmethod.md
 - docs/models/destinationfireboltmethod.md
@@ -360,6 +377,7 @@ generatedFiles:
 - docs/models/destinationmongodb.md
 - docs/models/destinationmongodbauthorization.md
 - docs/models/destinationmongodbinstance.md
+ - docs/models/destinationmongodbnone.md
 - docs/models/destinationmongodbnotunnel.md
 - docs/models/destinationmongodbpasswordauthentication.md
 - docs/models/destinationmongodbschemasauthorization.md
@@ -373,6 +391,7 @@ generatedFiles:
 - docs/models/destinationmssqlnotunnel.md
 - docs/models/destinationmssqlpasswordauthentication.md
 - docs/models/destinationmssqlschemassslmethod.md
+ - docs/models/destinationmssqlschemassslmethodsslmethod.md
 - docs/models/destinationmssqlschemastunnelmethod.md
 - docs/models/destinationmssqlschemastunnelmethodtunnelmethod.md
 - docs/models/destinationmssqlsshkeyauthentication.md
@@ -388,14 +407,40 @@ generatedFiles:
 - docs/models/destinationmysqlsshtunnelmethod.md
 - docs/models/destinationmysqltunnelmethod.md
 - docs/models/destinationoracle.md
+ - docs/models/destinationoracleencryptionmethod.md
 - docs/models/destinationoraclenotunnel.md
 - docs/models/destinationoraclepasswordauthentication.md
+ - docs/models/destinationoracleschemasencryptionmethod.md
 - docs/models/destinationoracleschemastunnelmethod.md
 - docs/models/destinationoracleschemastunnelmethodtunnelmethod.md
 - docs/models/destinationoraclesshkeyauthentication.md
 - docs/models/destinationoraclesshtunnelmethod.md
 - docs/models/destinationoracletunnelmethod.md
+ - docs/models/destinationoracleunencrypted.md
 - docs/models/destinationpatchrequest.md
+ - docs/models/destinationpgvector.md
+ - docs/models/destinationpgvectorazureopenai.md
+ - docs/models/destinationpgvectorbymarkdownheader.md
+ - docs/models/destinationpgvectorbyprogramminglanguage.md
+ - docs/models/destinationpgvectorbyseparator.md
+ - docs/models/destinationpgvectorcohere.md
+ - docs/models/destinationpgvectorcredentials.md
+ - docs/models/destinationpgvectorembedding.md
+ - docs/models/destinationpgvectorfake.md
+ - docs/models/destinationpgvectorfieldnamemappingconfigmodel.md
+ - docs/models/destinationpgvectorlanguage.md
+ - docs/models/destinationpgvectormode.md
+ - docs/models/destinationpgvectoropenai.md
+ - docs/models/destinationpgvectoropenaicompatible.md
+ - docs/models/destinationpgvectorprocessingconfigmodel.md
+ - docs/models/destinationpgvectorschemasembeddingembedding5mode.md
+ - docs/models/destinationpgvectorschemasembeddingembeddingmode.md
+ - docs/models/destinationpgvectorschemasembeddingmode.md
+ - docs/models/destinationpgvectorschemasmode.md
+ - docs/models/destinationpgvectorschemasprocessingmode.md
+ - docs/models/destinationpgvectorschemasprocessingtextsplittermode.md
+ - docs/models/destinationpgvectorschemasprocessingtextsplittertextsplittermode.md
+ - docs/models/destinationpgvectortextsplitter.md
 - docs/models/destinationpinecone.md
 - docs/models/destinationpineconeazureopenai.md
 - docs/models/destinationpineconebymarkdownheader.md
@@ -634,7 +679,6 @@ generatedFiles:
 - docs/models/destinationyellowbrickverifyfull.md
 - docs/models/detailtype.md
 - docs/models/detectchangeswithxminsystemcolumn.md
- - docs/models/devnull.md
 - docs/models/dimension.md
 - docs/models/dimensionsfilter.md
 - docs/models/disable.md
@@ -649,7 +693,6 @@ generatedFiles:
 - docs/models/duckdb.md
 - docs/models/dynamodb.md
 - docs/models/dynamodbregion.md
- - docs/models/e2etestcloud.md
 - docs/models/elasticsearch.md
 - docs/models/emailoctopus.md
 - docs/models/embedding.md
@@ -660,12 +703,14 @@ generatedFiles:
 - docs/models/encryptionalgorithm.md
 - docs/models/encryptionmethod.md
 - docs/models/engagementwindowdays.md
+ - docs/models/enterprise.md
 - docs/models/environment.md
 - docs/models/eubasedaccount.md
 - docs/models/excelformat.md
 - docs/models/exchangerates.md
 - docs/models/expression.md
 - docs/models/externaltablevias3.md
+ - docs/models/ezofficeinventory.md
 - docs/models/facebookmarketing.md
 - docs/models/facebookmarketingcredentials.md
 - docs/models/fake.md
@@ -693,6 +738,7 @@ generatedFiles:
 - docs/models/fromcsv.md
 - docs/models/fromfield.md
 - docs/models/fromvalue.md
+ - docs/models/front.md
 - docs/models/gainsightpx.md
 - docs/models/gcs.md
 - docs/models/gcsbucketregion.md
@@ -722,17 +768,21 @@ generatedFiles:
 - docs/models/googlesearchconsole.md
 - docs/models/googlesheets.md
 - docs/models/googlesheetscredentials.md
+ - docs/models/googletasks.md
 - docs/models/googlewebfonts.md
 - docs/models/granularity.md
 - docs/models/granularityforgeolocationregion.md
 - docs/models/granularityforperiodicreports.md
 - docs/models/greenhouse.md
 - docs/models/gridly.md
+ - docs/models/groupby.md
+ - docs/models/guru.md
 - docs/models/gzip.md
 - docs/models/hardcodedrecords.md
 - docs/models/harvest.md
 - docs/models/header.md
 - docs/models/headerdefinitiontype.md
+ - docs/models/height.md
 - docs/models/hibob.md
 - docs/models/highlevel.md
 - docs/models/hmackey.md
@@ -744,7 +794,6 @@ generatedFiles:
 - docs/models/iamuser.md
 - docs/models/in_.md
 - docs/models/indexing.md
- - docs/models/inferencetype.md
 - docs/models/initiateoauthrequest.md
 - docs/models/inlistfilter.md
 - docs/models/insightconfig.md
@@ -756,7 +805,6 @@ generatedFiles:
 - docs/models/intercom.md
 - docs/models/invalidcdcpositionbehavioradvanced.md
 - docs/models/ip2whois.md
- - docs/models/issuesstreamexpandwith.md
 - docs/models/iterable.md
 - docs/models/jira.md
 - docs/models/jobcreaterequest.md
@@ -764,7 +812,7 @@ generatedFiles:
 - docs/models/jobsresponse.md
 - docs/models/jobstatusenum.md
 - docs/models/jobtypeenum.md
- - docs/models/jsonl.md
+ - docs/models/jotform.md
 - docs/models/jsonlformat.md
 - docs/models/jsonlinesnewlinedelimitedjson.md
 - docs/models/k6cloud.md
@@ -785,6 +833,7 @@ generatedFiles:
 - docs/models/linnworks.md
 - docs/models/loadingmethod.md
 - docs/models/local.md
+ - docs/models/localfilesystemlimited.md
 - docs/models/loginpassword.md
 - docs/models/lokalise.md
 - docs/models/looker.md
@@ -806,7 +855,6 @@ generatedFiles:
 - docs/models/microsoftteamscredentials.md
 - docs/models/milvus.md
 - docs/models/mixpanel.md
- - docs/models/mockcatalog.md
 - docs/models/mode.md
 - docs/models/monday.md
 - docs/models/mondaycredentials.md
@@ -816,7 +864,6 @@ generatedFiles:
 - docs/models/mongodbinstancetype.md
 - docs/models/mongodbv2.md
 - docs/models/mssql.md
- - docs/models/multischema.md
 - docs/models/myhours.md
 - docs/models/mysql.md
 - docs/models/namespacedefinitionenum.md
@@ -839,6 +886,7 @@ generatedFiles:
 - docs/models/notunnel.md
 - docs/models/nullable.md
 - docs/models/numericfilter.md
+ - docs/models/nylas.md
 - docs/models/nytimes.md
 - docs/models/oauth.md
 - docs/models/oauth20.md
@@ -869,7 +917,6 @@ generatedFiles:
 - docs/models/outputformat.md
 - docs/models/outputformatwildcard.md
 - docs/models/outreach.md
- - docs/models/parquet.md
 - docs/models/parquetcolumnarstorage.md
 - docs/models/parquetformat.md
 - docs/models/parsingstrategy.md
@@ -889,11 +936,14 @@ generatedFiles:
 - docs/models/persistiq.md
 - docs/models/personalaccesstoken.md
 - docs/models/pexelsapi.md
+ - docs/models/pgvector.md
+ - docs/models/picqer.md
 - docs/models/pinecone.md
 - docs/models/pinterest.md
 - docs/models/pinterestcredentials.md
 - docs/models/pipedrive.md
 - docs/models/pivotcategory.md
+ - docs/models/piwik.md
 - docs/models/planhat.md
 - docs/models/plugin.md
 - docs/models/pocket.md
@@ -901,6 +951,7 @@ generatedFiles:
 - docs/models/pokemonname.md
 - docs/models/polygonstockapi.md
 - docs/models/postgres.md
+ - docs/models/postgresconnection.md
 - docs/models/posthog.md
 - docs/models/postmarkapp.md
 - docs/models/prefer.md
@@ -910,7 +961,9 @@ generatedFiles:
 - docs/models/privatetoken.md
 - docs/models/processing.md
 - docs/models/processingconfigmodel.md
+ - docs/models/productboard.md
 - docs/models/productcatalog.md
+ - docs/models/productive.md
 - docs/models/projectsecret.md
 - docs/models/publicpermissiontype.md
 - docs/models/pubsub.md
@@ -975,6 +1028,7 @@ generatedFiles:
 - docs/models/serviceaccountkeyauthentication.md
 - docs/models/servicekeyauthentication.md
 - docs/models/servicename.md
+ - docs/models/sevenshifts.md
 - docs/models/sftp.md
 - docs/models/sftpbulk.md
 - docs/models/sftpjson.md
@@ -983,11 +1037,10 @@ generatedFiles:
 - docs/models/shopify.md
 - docs/models/shopifyauthorizationmethod.md
 - docs/models/shopifycredentials.md
+ - docs/models/shortcut.md
 - docs/models/shortio.md
 - docs/models/signinviagoogleoauth.md
 - docs/models/signinviaslackoauth.md
- - docs/models/silent.md
- - docs/models/singleschema.md
 - docs/models/singlestoreaccesstoken.md
 - docs/models/site.md
 - docs/models/slack.md
@@ -1004,6 +1057,7 @@ generatedFiles:
 - docs/models/snowflakecredentials.md
 - docs/models/sonarcloud.md
 - docs/models/sortby.md
+ - docs/models/source7shifts.md
 - docs/models/sourceaha.md
 - docs/models/sourceairbyte.md
 - docs/models/sourceaircall.md
@@ -1024,6 +1078,8 @@ generatedFiles:
 - docs/models/sourceamazonsqsawsregion.md
 - docs/models/sourceamplitude.md
 - docs/models/sourceapifydataset.md
+ - docs/models/sourceappcues.md
+ - docs/models/sourceappfigures.md
 - docs/models/sourceappfollow.md
 - docs/models/sourceasana.md
 - docs/models/sourceasanaasana.md
@@ -1046,7 +1102,6 @@ generatedFiles:
 - docs/models/sourceazureblobstorageschemasheaderdefinitiontype.md
 - docs/models/sourceazureblobstorageschemasstreamsfiletype.md
 - docs/models/sourceazureblobstorageschemasstreamsformatfiletype.md
- - docs/models/sourceazureblobstorageschemasstreamsformatformatfiletype.md
 - docs/models/sourceazuretable.md
 - docs/models/sourcebamboohr.md
 - docs/models/sourcebasecamp.md
@@ -1054,17 +1109,24 @@ generatedFiles:
 - docs/models/sourcebigquerybigquery.md
 - docs/models/sourcebingads.md
 - docs/models/sourcebingadsbingads.md
+ - docs/models/sourcebitly.md
 - docs/models/sourcebraintree.md
 - docs/models/sourcebraintreeenvironment.md
 - docs/models/sourcebraze.md
 - docs/models/sourcebreezyhr.md
+ - docs/models/sourcebrevo.md
+ - docs/models/sourcebuildkite.md
+ - docs/models/sourcebuzzsprout.md
 - docs/models/sourcecalendly.md
+ - docs/models/sourcecanny.md
 - docs/models/sourcecart.md
 - docs/models/sourcecartauthorizationmethod.md
 - docs/models/sourcecartauthtype.md
 - docs/models/sourcecartschemasauthtype.md
+ - docs/models/sourcechameleon.md
 - docs/models/sourcechargebee.md
 - docs/models/sourcechartmogul.md
+ - docs/models/sourcecimis.md
 - docs/models/sourceclazar.md
 - docs/models/sourceclickhouse.md
 - docs/models/sourceclickhouseclickhouse.md
@@ -1101,11 +1163,9 @@ generatedFiles:
 - docs/models/sourcedynamodbdynamodb.md
 - docs/models/sourcedynamodbdynamodbregion.md
 - docs/models/sourcedynamodbschemasauthtype.md
- - docs/models/sourcee2etestcloud.md
- - docs/models/sourcee2etestcloudschemastype.md
- - docs/models/sourcee2etestcloudtype.md
 - docs/models/sourceemailoctopus.md
 - docs/models/sourceexchangerates.md
+ - docs/models/sourceezofficeinventory.md
 - docs/models/sourcefacebookmarketing.md
 - docs/models/sourcefacebookmarketingactionreporttime.md
 - docs/models/sourcefacebookmarketingauthentication.md
@@ -1118,10 +1178,10 @@ generatedFiles:
 - docs/models/sourcefaunadeletionmode.md
 - docs/models/sourcefaunaschemasdeletionmode.md
 - docs/models/sourcefile.md
- - docs/models/sourcefiles3amazonwebservices.md
 - docs/models/sourcefileschemasproviderstorage.md
 - docs/models/sourcefileschemasproviderstorageprovider6storage.md
 - docs/models/sourcefileschemasproviderstorageprovider7storage.md
+ - docs/models/sourcefileschemasproviderstorageprovider8storage.md
 - docs/models/sourcefileschemasproviderstorageproviderstorage.md
 - docs/models/sourcefileschemasstorage.md
 - docs/models/sourcefilestorage.md
@@ -1131,6 +1191,7 @@ generatedFiles:
 - docs/models/sourcefreshcaller.md
 - docs/models/sourcefreshdesk.md
 - docs/models/sourcefreshsales.md
+ - docs/models/sourcefront.md
 - docs/models/sourcegainsightpx.md
 - docs/models/sourcegcs.md
 - docs/models/sourcegcsautogenerated.md
@@ -1143,7 +1204,6 @@ generatedFiles:
 - docs/models/sourcegcsfromcsv.md
 - docs/models/sourcegcsgcs.md
 - docs/models/sourcegcsheaderdefinitiontype.md
- - docs/models/sourcegcsinferencetype.md
 - docs/models/sourcegcsjsonlformat.md
 - docs/models/sourcegcslocal.md
 - docs/models/sourcegcsmode.md
@@ -1216,7 +1276,6 @@ generatedFiles:
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1expressionsint64value.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1expressionsvalidenums.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1expressionsvaluetype.md
- - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filtername.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1int64value.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1validenums.md
@@ -1245,6 +1304,7 @@ generatedFiles:
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3expressionfiltervaluetype.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3expressionint64value.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3expressionvaluetype.md
+ - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3filter.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3filtername.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3int64value.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3validenums.md
@@ -1298,6 +1358,7 @@ generatedFiles:
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1expressionsfiltervaluetype.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1expressionsint64value.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1expressionsvaluetype.md
+ - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1filter.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1filtername.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1int64value.md
 - docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1validenums.md
@@ -1453,15 +1514,18 @@ generatedFiles:
 - docs/models/sourcegooglesheetsgooglesheets.md
 - docs/models/sourcegooglesheetsschemasauthtype.md
 - docs/models/sourcegooglesheetsserviceaccountkeyauthentication.md
+ - docs/models/sourcegoogletasks.md
 - docs/models/sourcegooglewebfonts.md
 - docs/models/sourcegreenhouse.md
 - docs/models/sourcegridly.md
+ - docs/models/sourceguru.md
 - docs/models/sourcehardcodedrecords.md
 - docs/models/sourceharvest.md
 - docs/models/sourceharvestauthenticatewithpersonalaccesstoken.md
 - docs/models/sourceharvestauthenticationmechanism.md
 - docs/models/sourceharvestauthtype.md
 - docs/models/sourceharvestschemasauthtype.md
+ - docs/models/sourceheight.md
 - docs/models/sourcehibob.md
 - docs/models/sourcehighlevel.md
 - docs/models/sourcehubplanner.md
@@ -1480,6 +1544,9 @@ generatedFiles:
 - docs/models/sourceip2whois.md
 - docs/models/sourceiterable.md
 - docs/models/sourcejira.md
+ - docs/models/sourcejotform.md
+ - docs/models/sourcejotformapiendpoint.md
+ - docs/models/sourcejotformschemasapiendpoint.md
 - docs/models/sourcek6cloud.md
 - docs/models/sourceklarna.md
 - docs/models/sourceklarnaregion.md
@@ -1619,6 +1686,7 @@ generatedFiles:
 - docs/models/sourcemssqlsshtunnelmethod.md
 - docs/models/sourcemssqlsslmethod.md
 - docs/models/sourcemssqltunnelmethod.md
+ - docs/models/sourcemssqlunencrypted.md
 - docs/models/sourcemyhours.md
 - docs/models/sourcemysql.md
 - docs/models/sourcemysqlinvalidcdcpositionbehavioradvanced.md
@@ -1649,6 +1717,7 @@ generatedFiles:
 - docs/models/sourcenotionnotion.md
 - docs/models/sourcenotionoauth20.md
 - docs/models/sourcenotionschemasauthtype.md
+ - docs/models/sourcenylas.md
 - docs/models/sourcenytimes.md
 - docs/models/sourceokta.md
 - docs/models/sourceoktaapitoken.md
@@ -1661,15 +1730,22 @@ generatedFiles:
 - docs/models/sourceonesignal.md
 - docs/models/sourceoracle.md
 - docs/models/sourceoracleconnectiontype.md
+ - docs/models/sourceoracleencryption.md
+ - docs/models/sourceoracleencryptionalgorithm.md
 - docs/models/sourceoracleencryptionmethod.md
+ - docs/models/sourceoraclenativenetworkencryptionnne.md
 - docs/models/sourceoraclenotunnel.md
 - docs/models/sourceoracleoracle.md
 - docs/models/sourceoraclepasswordauthentication.md
+ - docs/models/sourceoracleschemasencryptionencryptionmethod.md
+ - docs/models/sourceoracleschemasencryptionmethod.md
 - docs/models/sourceoracleschemastunnelmethod.md
 - docs/models/sourceoracleschemastunnelmethodtunnelmethod.md
 - docs/models/sourceoraclesshkeyauthentication.md
 - docs/models/sourceoraclesshtunnelmethod.md
+ - docs/models/sourceoracletlsencryptedverifycertificate.md
 - docs/models/sourceoracletunnelmethod.md
+ - docs/models/sourceoracleunencrypted.md
 - docs/models/sourceorb.md
 - docs/models/sourceorbit.md
 - docs/models/sourceoutbrainamplify.md
@@ -1684,6 +1760,7 @@ generatedFiles:
 - docs/models/sourcepennylane.md
 - docs/models/sourcepersistiq.md
 - docs/models/sourcepexelsapi.md
+ - docs/models/sourcepicqer.md
 - docs/models/sourcepinterest.md
 - docs/models/sourcepinterestauthmethod.md
 - docs/models/sourcepinterestlevel.md
@@ -1691,6 +1768,7 @@ generatedFiles:
 - docs/models/sourcepinterestschemasvalidenums.md
 - docs/models/sourcepinterestvalidenums.md
 - docs/models/sourcepipedrive.md
+ - docs/models/sourcepiwik.md
 - docs/models/sourceplanhat.md
 - docs/models/sourcepocket.md
 - docs/models/sourcepocketsortby.md
@@ -1727,6 +1805,8 @@ generatedFiles:
 - docs/models/sourceposthog.md
 - docs/models/sourcepostmarkapp.md
 - docs/models/sourceprestashop.md
+ - docs/models/sourceproductboard.md
+ - docs/models/sourceproductive.md
 - docs/models/sourceputrequest.md
 - docs/models/sourcepypi.md
 - docs/models/sourcequalaroo.md
@@ -1752,12 +1832,10 @@ generatedFiles:
 - docs/models/sources3csvformat.md
 - docs/models/sources3csvheaderdefinition.md
 - docs/models/sources3filebasedstreamconfig.md
- - docs/models/sources3fileformat.md
 - docs/models/sources3filetype.md
 - docs/models/sources3format.md
 - docs/models/sources3fromcsv.md
 - docs/models/sources3headerdefinitiontype.md
- - docs/models/sources3inferencetype.md
 - docs/models/sources3jsonlformat.md
 - docs/models/sources3local.md
 - docs/models/sources3mode.md
@@ -1766,12 +1844,9 @@ generatedFiles:
 - docs/models/sources3processing.md
 - docs/models/sources3s3.md
 - docs/models/sources3schemasfiletype.md
- - docs/models/sources3schemasformatfiletype.md
 - docs/models/sources3schemasheaderdefinitiontype.md
 - docs/models/sources3schemasstreamsfiletype.md
 - docs/models/sources3schemasstreamsformatfiletype.md
- - docs/models/sources3schemasstreamsformatformat4filetype.md
- - docs/models/sources3schemasstreamsformatformat5filetype.md
 - docs/models/sources3schemasstreamsformatformatfiletype.md
 - docs/models/sources3schemasstreamsheaderdefinitiontype.md
 - docs/models/sources3unstructureddocumentformat.md
@@ -1808,7 +1883,6 @@ generatedFiles:
 - docs/models/sourcesftpbulkformat.md
 - docs/models/sourcesftpbulkfromcsv.md
 - docs/models/sourcesftpbulkheaderdefinitiontype.md
- - docs/models/sourcesftpbulkinferencetype.md
 - docs/models/sourcesftpbulkjsonlformat.md
 - docs/models/sourcesftpbulklocal.md
 - docs/models/sourcesftpbulkmode.md
@@ -1836,6 +1910,7 @@ generatedFiles:
 - docs/models/sourceshopifyoauth20.md
 - docs/models/sourceshopifyschemasauthmethod.md
 - docs/models/sourceshopifyshopify.md
+ - docs/models/sourceshortcut.md
 - docs/models/sourceshortio.md
 - docs/models/sourceslack.md
 - docs/models/sourceslackapitoken.md
@@ -1879,6 +1954,8 @@ generatedFiles:
 - docs/models/sourcesurveymonkeysurveymonkey.md
 - docs/models/sourcesurveysparrow.md
 - docs/models/sourcesurveysparrowurlbase.md
+ - docs/models/sourcesurvicate.md
+ - docs/models/sourceteamwork.md
 - docs/models/sourcetempo.md
 - docs/models/sourcetheguardianapi.md
 - docs/models/sourcetiktokmarketing.md
@@ -1908,6 +1985,7 @@ generatedFiles:
 - docs/models/sourceuscensus.md
 - docs/models/sourcevantage.md
 - docs/models/sourcewebflow.md
+ - docs/models/sourcewheniwork.md
 - docs/models/sourcewhiskyhunter.md
 - docs/models/sourcewikipediapageviews.md
 - docs/models/sourcewoocommerce.md
@@ -1923,7 +2001,6 @@ generatedFiles:
 - docs/models/sourcezendeskchatoauth20.md
 - docs/models/sourcezendeskchatschemascredentials.md
 - docs/models/sourcezendeskchatzendeskchat.md
- - docs/models/sourcezendesksell.md
 - docs/models/sourcezendesksunshine.md
 - docs/models/sourcezendesksunshineapitoken.md
 - docs/models/sourcezendesksunshineauthmethod.md
@@ -1976,12 +2053,13 @@ generatedFiles:
 - docs/models/surveymonkeyauthorizationmethod.md
 - docs/models/surveymonkeycredentials.md
 - docs/models/surveysparrow.md
+ - docs/models/survicate.md
 - docs/models/swipeupattributionwindow.md
 - docs/models/systemidsid.md
+ - docs/models/targetstype.md
+ - docs/models/teamwork.md
 - docs/models/tempo.md
 - docs/models/teradata.md
- - docs/models/testdestination.md
- - docs/models/testdestinationtype.md
 - docs/models/textsplitter.md
 - docs/models/theguardianapi.md
 - docs/models/tiktokmarketing.md
@@ -2000,12 +2078,11 @@ generatedFiles:
 - docs/models/twilio.md
 - docs/models/twiliotaskrouter.md
 - docs/models/twitter.md
- - docs/models/type.md
 - docs/models/typeform.md
 - docs/models/typeformcredentials.md
 - docs/models/typesense.md
 - docs/models/unencrypted.md
- - docs/models/unexpectedfieldbehavior.md
+ - docs/models/unitofmeasure.md
 - docs/models/unstructureddocumentformat.md
 - docs/models/updatemethod.md
 - docs/models/uploadingmethod.md
@@ -2035,6 +2112,7 @@ generatedFiles:
 - docs/models/viewwindowdays.md
 - docs/models/weaviate.md
 - docs/models/webflow.md
+ - docs/models/wheniwork.md
 - docs/models/whiskyhunter.md
 - docs/models/wikipediapageviews.md
 - docs/models/woocommerce.md
@@ -2052,7 +2130,6 @@ generatedFiles:
 - docs/models/youtubeanalyticscredentials.md
 - docs/models/zendeskchat.md
 - docs/models/zendeskchatcredentials.md
- - docs/models/zendesksell.md
 - docs/models/zendesksunshine.md
 - docs/models/zendesksunshinecredentials.md
 - docs/models/zendesksupport.md
@@ -2151,7 +2228,6 @@ generatedFiles:
 - src/airbyte_api/models/destination_clickhouse.py
 - src/airbyte_api/models/destination_convex.py
 - src/airbyte_api/models/destination_databricks.py
- - src/airbyte_api/models/destination_dev_null.py
 - src/airbyte_api/models/destination_duckdb.py
 - src/airbyte_api/models/destination_dynamodb.py
 - src/airbyte_api/models/destination_elasticsearch.py
@@ -2164,6 +2240,7 @@ generatedFiles:
 - src/airbyte_api/models/destination_mssql.py
 - src/airbyte_api/models/destination_mysql.py
 - src/airbyte_api/models/destination_oracle.py
+ - src/airbyte_api/models/destination_pgvector.py
 - src/airbyte_api/models/destination_pinecone.py
 - src/airbyte_api/models/destination_postgres.py
 - src/airbyte_api/models/destination_pubsub.py
@@ -2244,6 +2321,7 @@ generatedFiles:
 - src/airbyte_api/models/smartsheets.py
 - src/airbyte_api/models/snapchat_marketing.py
 - src/airbyte_api/models/snowflake.py
+ - src/airbyte_api/models/source_7shifts.py
 - src/airbyte_api/models/source_aha.py
 - src/airbyte_api/models/source_airbyte.py
 - src/airbyte_api/models/source_aircall.py
@@ -2253,6 +2331,8 @@ generatedFiles:
 - src/airbyte_api/models/source_amazon_sqs.py
 - src/airbyte_api/models/source_amplitude.py
 - src/airbyte_api/models/source_apify_dataset.py
+ - src/airbyte_api/models/source_appcues.py
+ - src/airbyte_api/models/source_appfigures.py
 - src/airbyte_api/models/source_appfollow.py
 - src/airbyte_api/models/source_asana.py
 - src/airbyte_api/models/source_auth0.py
@@ -2263,13 +2343,20 @@ generatedFiles:
 - src/airbyte_api/models/source_basecamp.py
 - src/airbyte_api/models/source_bigquery.py
 - src/airbyte_api/models/source_bing_ads.py
+ - src/airbyte_api/models/source_bitly.py
 - src/airbyte_api/models/source_braintree.py
 - src/airbyte_api/models/source_braze.py
 - src/airbyte_api/models/source_breezy_hr.py
+ - src/airbyte_api/models/source_brevo.py
+ - src/airbyte_api/models/source_buildkite.py
+ - src/airbyte_api/models/source_buzzsprout.py
 - src/airbyte_api/models/source_calendly.py
+ - src/airbyte_api/models/source_canny.py
 - src/airbyte_api/models/source_cart.py
+ - src/airbyte_api/models/source_chameleon.py
 - src/airbyte_api/models/source_chargebee.py
 - src/airbyte_api/models/source_chartmogul.py
+ - src/airbyte_api/models/source_cimis.py
 - src/airbyte_api/models/source_clazar.py
 - src/airbyte_api/models/source_clickhouse.py
 - src/airbyte_api/models/source_clickup_api.py
@@ -2290,9 +2377,9 @@ generatedFiles:
 - src/airbyte_api/models/source_dockerhub.py
 - src/airbyte_api/models/source_dremio.py
 - src/airbyte_api/models/source_dynamodb.py
- - src/airbyte_api/models/source_e2e_test_cloud.py
 - src/airbyte_api/models/source_emailoctopus.py
 - src/airbyte_api/models/source_exchange_rates.py
+ - src/airbyte_api/models/source_ezofficeinventory.py
 - src/airbyte_api/models/source_facebook_marketing.py
 - src/airbyte_api/models/source_faker.py
 - src/airbyte_api/models/source_fauna.py
@@ -2302,6 +2389,7 @@ generatedFiles:
 - src/airbyte_api/models/source_freshcaller.py
 - src/airbyte_api/models/source_freshdesk.py
 - src/airbyte_api/models/source_freshsales.py
+ - src/airbyte_api/models/source_front.py
 - src/airbyte_api/models/source_gainsight_px.py
 - src/airbyte_api/models/source_gcs.py
 - src/airbyte_api/models/source_getlago.py
@@ -2317,11 +2405,14 @@ generatedFiles:
 - src/airbyte_api/models/source_google_pagespeed_insights.py
 - src/airbyte_api/models/source_google_search_console.py
 - src/airbyte_api/models/source_google_sheets.py
+ - src/airbyte_api/models/source_google_tasks.py
 - src/airbyte_api/models/source_google_webfonts.py
 - src/airbyte_api/models/source_greenhouse.py
 - src/airbyte_api/models/source_gridly.py
+ - src/airbyte_api/models/source_guru.py
 - src/airbyte_api/models/source_hardcoded_records.py
 - src/airbyte_api/models/source_harvest.py
+ - src/airbyte_api/models/source_height.py
 - src/airbyte_api/models/source_hibob.py
 - src/airbyte_api/models/source_high_level.py
 - src/airbyte_api/models/source_hubplanner.py
@@ -2333,6 +2424,7 @@ generatedFiles:
 - src/airbyte_api/models/source_ip2whois.py
 - src/airbyte_api/models/source_iterable.py
 - src/airbyte_api/models/source_jira.py
+ - src/airbyte_api/models/source_jotform.py
 - src/airbyte_api/models/source_k6_cloud.py
 - src/airbyte_api/models/source_klarna.py
 - src/airbyte_api/models/source_klaviyo.py
@@ -2364,6 +2456,7 @@ generatedFiles:
 - src/airbyte_api/models/source_netsuite.py
 - src/airbyte_api/models/source_northpass_lms.py
 - src/airbyte_api/models/source_notion.py
+ - src/airbyte_api/models/source_nylas.py
 - src/airbyte_api/models/source_nytimes.py
 - src/airbyte_api/models/source_okta.py
 - src/airbyte_api/models/source_omnisend.py
@@ -2379,8 +2472,10 @@ generatedFiles:
 - src/airbyte_api/models/source_pennylane.py
 - src/airbyte_api/models/source_persistiq.py
 - src/airbyte_api/models/source_pexels_api.py
+ - src/airbyte_api/models/source_picqer.py
 - src/airbyte_api/models/source_pinterest.py
 - src/airbyte_api/models/source_pipedrive.py
+ - src/airbyte_api/models/source_piwik.py
 - src/airbyte_api/models/source_planhat.py
 - src/airbyte_api/models/source_pocket.py
 - src/airbyte_api/models/source_pokeapi.py
@@ -2389,6 +2484,8 @@ generatedFiles:
 - src/airbyte_api/models/source_posthog.py
 - src/airbyte_api/models/source_postmarkapp.py
 - src/airbyte_api/models/source_prestashop.py
+ - src/airbyte_api/models/source_productboard.py
+ - src/airbyte_api/models/source_productive.py
 - src/airbyte_api/models/source_pypi.py
 - src/airbyte_api/models/source_qualaroo.py
 - src/airbyte_api/models/source_railz.py
@@ -2415,6 +2512,7 @@ generatedFiles:
 - src/airbyte_api/models/source_sftp.py
 - src/airbyte_api/models/source_sftp_bulk.py
 - src/airbyte_api/models/source_shopify.py
+ - src/airbyte_api/models/source_shortcut.py
 - src/airbyte_api/models/source_shortio.py
 - src/airbyte_api/models/source_slack.py
 - src/airbyte_api/models/source_smaily.py
@@ -2429,6 +2527,8 @@ generatedFiles:
 - src/airbyte_api/models/source_stripe.py
 - src/airbyte_api/models/source_survey_sparrow.py
 - src/airbyte_api/models/source_surveymonkey.py
+ - src/airbyte_api/models/source_survicate.py
+ - src/airbyte_api/models/source_teamwork.py
 - src/airbyte_api/models/source_tempo.py
 - src/airbyte_api/models/source_the_guardian_api.py
 - src/airbyte_api/models/source_tiktok_marketing.py
@@ -2442,6 +2542,7 @@ generatedFiles:
 - src/airbyte_api/models/source_us_census.py
 - src/airbyte_api/models/source_vantage.py
 - src/airbyte_api/models/source_webflow.py
+ - src/airbyte_api/models/source_when_i_work.py
 - src/airbyte_api/models/source_whisky_hunter.py
 - src/airbyte_api/models/source_wikipedia_pageviews.py
 - src/airbyte_api/models/source_woocommerce.py
@@ -2450,7 +2551,6 @@ generatedFiles:
 - src/airbyte_api/models/source_yotpo.py
 - src/airbyte_api/models/source_youtube_analytics.py
 - src/airbyte_api/models/source_zendesk_chat.py
- - src/airbyte_api/models/source_zendesk_sell.py
 - src/airbyte_api/models/source_zendesk_sunshine.py
 - src/airbyte_api/models/source_zendesk_support.py
 - src/airbyte_api/models/source_zendesk_talk.py
@@ -2549,16 +2649,14 @@ examples:
       requestBody:
         application/json:
           configuration:
-            embedding:
-              openai_key: 
            indexing:
              index: 
-              pinecone_environment: us-west1-gcp
+              pinecone_environment: gcp-starter
              pinecone_key: 
            processing:
-              chunk_size: 45493
+              chunk_size: 25368
              metadata_fields:
-                - user
+                - user.name
              text_fields:
                - user.name
          name: Postgres
@@ -2567,16 +2665,20 @@ examples:
        "200":
          application/json:
            configuration:
-              corpus_name: 
-              customer_id: 
-              metadata_fields:
-                - user
-              oauth2:
-                client_id: 
-                client_secret: 
-              text_fields:
-                - users.*.name
-              title_field: document_key
+              indexing:
+                credentials:
+                  password: AIRBYTE_PASSWORD
+                database: AIRBYTE_DATABASE
+                default_schema: AIRBYTE_SCHEMA
+                host: AIRBYTE_ACCOUNT
+                port: "5432"
+                username: AIRBYTE_USER
+              processing:
+                chunk_size: 988471
+                metadata_fields:
+                  - age
+                text_fields:
+                  - user.name
            destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            destinationType: postgres
            name: Analytics Team Postgres
@@ -2585,18 +2687,30 @@ examples:
      requestBody:
        application/json:
          configuration:
-            credentials:
-              client_id: 
-              client_secret: 
-              refresh_token: 
-            spreadsheet_id: https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit
+            project_id: 
          name: 
          workspaceId: ad46e8e6-2f62-408e-9ba5-019ef3492fc0
      responses:
        "200":
          application/json:
            configuration:
-              project_id: 
+              database: 
+              host: bare-nightlife.org
+              password: 1kxEg7h49Vv5Y0Q
+              port: "5439"
+              schema: public
+              tunnel_method:
+                tunnel_host: 
+                tunnel_port: "22"
+                tunnel_user: 
+                tunnel_user_password: 
+              uploading_method:
+                access_key_id: 
+                file_name_pattern: '{part_number}'
+                s3_bucket_name: airbyte.staging
+                s3_bucket_path: data_sync/test
+                secret_access_key: 
+              username: Alivia60
            destinationId: af0c3c67-aa61-419f-8922-95b0bf840e86
            destinationType: 
            name: 
@@ -2669,20 +2783,34 @@ examples:
      requestBody:
        application/json:
          configuration:
-            access_token: 
-            consumer_key: 
-            since: "2022-10-20 14:14:14"
+            custom_reports:
+              - columns: []
+                name: 
+                start_date: "2022-07-28"
+              - columns: []
+                name: 
+                start_date: "2022-07-28"
+              - columns:
+                  - TOTAL_WEB_ENGAGEMENT_CHECKOUT
+                  - TOTAL_VIEW_LEAD
+                  - TOTAL_ENGAGEMENT_CHECKOUT
+                name: 
+                start_date: "2022-07-28"
+            start_date: "2022-07-28"
          name: My Source
          workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035
      responses:
        "200":
          application/json:
            configuration:
-              client_id: 
-              client_secret: 
-              end_date: "2022-01-30"
-              refresh_token: 
-              start_date: "2022-01-01"
+              credentials:
+                client_id: 
+                client_secret: 
+                tenant_id: 
+              start_date: "2021-01-01T00:00:00.000000Z"
+              streams:
+                - name: 
+                - name: 
            name: Analytics Team Postgres
            sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            sourceType: postgres
@@ -2691,7 +2819,9 @@ examples:
      requestBody:
        application/json:
          configuration:
-            api_token: 
+            api_key: 
+            end_date: "2024-03-01T00:00:00Z"
+            start_date: "2022-03-01T00:00:00Z"
          name: 
          workspaceId: cf0f31f3-ddc9-4848-834b-dfb109056aa6
      responses:
        "200":
          application/json:
            configuration:
              credentials:
-                access_token: 
-                start_date: "2017-01-25T00:00:00Z"
+                service_account_info: '{ "type": "service_account", "project_id": YOUR_PROJECT_ID, "private_key_id": YOUR_PRIVATE_KEY, ... }'
+              spreadsheet_id: https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit
            name: 
            sourceId: 0c31738c-0b2d-4887-b506-e2cd1c39cc35
            sourceType: 
@@ -2774,11 +2904,22 @@ examples:
        "200":
          application/json:
            configuration:
-              host: puzzling-ceiling.org
-              port: "1521"
-              schema: airbyte
-              sid: 
-              username: Golda.Deckow
+              embedding:
+                openai_key: 
+              indexing:
+                credentials:
+                  password: AIRBYTE_PASSWORD
+                database: AIRBYTE_DATABASE
+                default_schema: AIRBYTE_SCHEMA
+                host: AIRBYTE_ACCOUNT
+                port: "5432"
+                username: AIRBYTE_USER
+              processing:
+                chunk_size: 102547
+                metadata_fields:
+                  - age
+                text_fields:
+                  - text
            destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            destinationType: 
            name: My Destination
@@ -2817,8 +2958,6 @@ examples:
      responses:
        "200":
          application/json:
-            configuration:
-              api_key: 
            name: running
            sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            sourceType: postgres
@@ -2869,13 +3008,13 @@ examples:
                schedule:
                  scheduleType: basic
                sourceId: 
                status: deprecated
                workspaceId: 
              - connectionId: 
                destinationId: 
                name: 
                schedule:
                  scheduleType: manual
                sourceId: 49237019-645d-47d4-b45b-5eddf97775ce
                status: inactive
                workspaceId: 
              - connectionId: 
                destinationId: al312fs-0ab1-4f72-9ed7-0b8fc27c5826
                name: 
                schedule:
                  scheduleType: cron
                sourceId: 
                status: inactive
                workspaceId: 
@@ -2893,13 +3032,13 @@ examples:
                schedule:
                  scheduleType: manual
                sourceId: 
                status: deprecated
                workspaceId: 
              - connectionId: 
                destinationId: 
                name: 
                schedule:
                  scheduleType: basic
                sourceId: 
                status: active
                workspaceId: 
              - connectionId: 
                destinationId: 
                name: 
                schedule:
                  scheduleType: manual
                sourceId: 
                status: inactive
                workspaceId: 
            next: https://api.airbyte.com/v1/connections?limit=5&offset=10
            previous: https://api.airbyte.com/v1/connections?limit=5&offset=0
@@ -2922,10 +3061,10 @@ examples:
            data:
              - configuration:
                  destination_path: /json_data
-                  host: cluttered-octet.name
-                  password: gVR2L2ho1Gzf99L
+                  host: phony-atrium.name
+                  password: R2h1z9Lo5EEb5Ho
                  port: 22
-                  username: Zoey.Cormier61
+                  username: Favian.Langworth
                destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
                destinationType: postgres
                name: Analytics Team Postgres
@@ -2965,7 +3104,7 @@ examples:
          application/json:
            data:
              - connectionId: 
-                jobId: 417148
+                jobId: 4995
                jobType: sync
                startTime: 
                status: running
@@ -2995,7 +3134,6 @@ examples:
            data:
              - configuration:
                  api_token: 
-                  start_date: "2022-11-08T21:10:51.424Z"
                name: Analytics Team Postgres
                sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
                sourceType: postgres
@@ -3067,26 +3205,24 @@ examples:
          destinationId: 
      requestBody:
        application/json:
-          configuration: {}
+          configuration:
+            destination_path: 'motherduck:'
+            schema: main
          name: My Destination
      responses:
        "200":
          application/json:
            configuration:
-              embedding:
-                openai_key: 
-              indexing:
-                auth:
-                  password: SYdhwRSRa3byd7R
-                  username: Precious_Bashirian
-                collection: 
-                host: tcp://host.docker.internal:19530
-              processing:
-                chunk_size: 393794
-                metadata_fields:
-                  - user.name
-                text_fields:
-                  - text
+              database: 
+              host: disloyal-lox.net
+              port: "5432"
+              schema: public
+              tunnel_method:
+                tunnel_host: 
+                tunnel_port: "22"
+                tunnel_user: 
+                tunnel_user_password: 
+              username: Velva.Yundt67
            destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            destinationType: postgres
            name: Analytics Team Postgres
@@ -3098,14 +3234,19 @@ examples:
      requestBody:
        application/json:
          configuration:
-            api_key: 
-            host: thoughtful-seeker.org
+            authentication:
+              personal_access_token: 
+            database: 
+            hostname: abc-12345678-wxyz.cloud.databricks.com
+            http_path: sql/1.0/warehouses/0000-1111111-abcd90
+            port: "443"
+            schema: default
      responses:
        "200":
          application/json:
            configuration:
-              api_key: 
-              host: generous-biosphere.org
+              destination_path: 'motherduck:'
+              schema: main
            destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            destinationType: 
            name: running
@@ -3118,9 +3259,7 @@ examples:
      requestBody:
        application/json:
          configuration:
-            access_key: 
-            base: EUR
-            start_date: YYYY-MM-DD
+            api_key: 
          name: My Source
          workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035
      responses:
        "200":
          application/json:
            configuration:
              auth_token: 
-              organization: 
-              project: 
+              counter_id: 
+              end_date: "2022-01-01"
+              start_date: "2022-01-01"
            name: Analytics Team Postgres
            sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            sourceType: postgres
@@ -3151,7 +3291,9 @@ examples:
          application/json:
            configuration:
              api_key: 
-              start_date: dd/mm/YYYY HH:MM
+              data_type: latest
+              symbols:
+                - BTC
            name: running
            sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            sourceType: postgres
@@ -3165,32 +3307,34 @@ examples:
        application/json:
          configuration:
            database: 
-            host: aggravating-town.info
+            host: urban-receptor.org
            port: "8123"
            tunnel_method:
+              ssh_key: 
              tunnel_host: 
              tunnel_port: "22"
              tunnel_user: 
-              tunnel_user_password: 
-            username: Roman_Wolff
+            username: Micheal.Wisoky
          name: My Destination
      responses:
        "200":
          application/json:
            configuration:
-              access_key_id: A012345678910EXAMPLE
-              file_name_pattern: '{date:yyyy_MM}'
-              format:
-                block_size_mb: 128
-                dictionary_page_size_kb: 1024
-                max_padding_size_mb: 8
-                page_size_kb: 1024
-              role_arn: arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId
-              s3_bucket_name: airbyte_sync
-              s3_bucket_path: data_sync/test
-              s3_endpoint: http://localhost:9000
-              s3_path_format: ${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_
-              secret_access_key: a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY
+              embedding:
+                openai_key: 
+              indexing:
+                additional_headers:
+                  - header_key: X-OpenAI-Api-Key
+                    value: my-openai-api-key
+                auth:
+                  token: 
+                host: https://my-cluster.weaviate.network
+              processing:
+                chunk_size: 484762
+                metadata_fields:
+                  - age
+                text_fields:
+                  - users.*.name
            destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826
            destinationType: postgres
            name: Analytics
Team Postgres @@ -3202,14 +3346,25 @@ examples: requestBody: application/json: configuration: - project_id: + endpoint: + tunnel_method: + ssh_key: + tunnel_host: + tunnel_port: "22" + tunnel_user: name: responses: "200": application/json: configuration: - api_key: - host: wide-cabin.info + access_key_id: A012345678910EXAMPLE + file_name_pattern: '{timestamp}' + role_arn: arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId + s3_bucket_name: airbyte_sync + s3_bucket_path: data_sync/test + s3_endpoint: http://localhost:9000 + s3_path_format: ${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_ + secret_access_key: a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 destinationType: name: running @@ -3221,14 +3376,19 @@ examples: sourceId: requestBody: application/json: + configuration: + api_key: + start_date: "2024-10-11T13:59:33.977Z" name: My Source responses: "200": application/json: configuration: - accept_version: 1.0.0 - api_key: a very long hex sequence - site_id: a relatively long hex sequence + lwa_app_id: + lwa_client_secret: + refresh_token: + replication_end_date: "2017-01-25T00:00:00Z" + replication_start_date: "2017-01-25T00:00:00Z" name: Analytics Team Postgres sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 sourceType: postgres @@ -3240,11 +3400,12 @@ examples: requestBody: application/json: configuration: - lwa_app_id: - lwa_client_secret: - refresh_token: - replication_end_date: "2017-01-25T00:00:00Z" - replication_start_date: "2017-01-25T00:00:00Z" + aws_key_id: + aws_secret_key: + lookup_attributes_filter: + attribute_key: EventName + attribute_value: ListInstanceAssociations + start_date: "2021-01-01" name: responses: "200": diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index a74cff89..bb39407a 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -2,8 +2,8 @@ speakeasyVersion: 1.291.0 sources: my-source: sourceNamespace: my-source - sourceRevisionDigest: sha256:815e87000d947fb5b9e40d9c548c1172ea6fdf12d93fca9f04e78074753d0c76 - sourceBlobDigest: sha256:3afb05187c88ef1e6d5a664247d2f4e41b4eb638ddacbce44599e4314d2854b9 + sourceRevisionDigest: sha256:4cbacce549695d61a4b6fcd07742e8eb25c528061ad1db3e002239fcea2956fa + sourceBlobDigest: sha256:8745bd06d95a1f1a7073a8a702c4bd4db2c5967d58f562d11de4f89ac7f872f5 tags: - latest - main @@ -11,8 +11,8 @@ targets: python-api: source: my-source sourceNamespace: my-source - sourceRevisionDigest: sha256:815e87000d947fb5b9e40d9c548c1172ea6fdf12d93fca9f04e78074753d0c76 - sourceBlobDigest: sha256:3afb05187c88ef1e6d5a664247d2f4e41b4eb638ddacbce44599e4314d2854b9 + sourceRevisionDigest: sha256:4cbacce549695d61a4b6fcd07742e8eb25c528061ad1db3e002239fcea2956fa + sourceBlobDigest: sha256:8745bd06d95a1f1a7073a8a702c4bd4db2c5967d58f562d11de4f89ac7f872f5 outLocation: /github/workspace/repo workflow: workflowVersion: 1.0.0 diff --git a/README.md b/README.md index 14b7659f..129aede8 100755 --- a/README.md +++ b/README.md @@ -77,6 +77,10 @@ if res.connection_response is not None: ## Available Resources and Operations +
+<details open>
+<summary>Available methods</summary>
+
+
 ### [connections](docs/sdks/connections/README.md)
 
 * [create_connection](docs/sdks/connections/README.md#create_connection) - Create a connection
@@ -143,6 +147,8 @@ if res.connection_response is not None:
 * [get_workspace](docs/sdks/workspaces/README.md#get_workspace) - Get Workspace details
 * [list_workspaces](docs/sdks/workspaces/README.md#list_workspaces) - List workspaces
 * [update_workspace](docs/sdks/workspaces/README.md#update_workspace) - Update a workspace
+
+</details>
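For readers skimming the method list in the README hunk above, a minimal usage sketch of the generated client may help. This is a sketch only: it assumes the request classes live in `airbyte_api.api` and that the response field is named `workspaces_response`, following the SDK's generated naming conventions; the credentials are placeholders.

```python
import airbyte_api
from airbyte_api import api, models

# Build the client; basic-auth credentials here are placeholders, not real secrets.
s = airbyte_api.AirbyteAPI(
    security=models.Security(
        basic_auth=models.SchemeBasicAuth(
            username='AIRBYTE_USERNAME',
            password='AIRBYTE_PASSWORD',
        ),
    ),
)

# Call one of the listed operations, e.g. list_workspaces from the workspaces SDK.
res = s.workspaces.list_workspaces(request=api.ListWorkspacesRequest())

if res.workspaces_response is not None:
    for workspace in res.workspaces_response.data:
        print(workspace.workspace_id, workspace.name)
```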
diff --git a/RELEASES.md b/RELEASES.md index 463d6ccc..43daf3c2 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -948,4 +948,14 @@ Based on: ### Generated - [python v0.52.0] . ### Releases -- [PyPI v0.52.0] https://pypi.org/project/airbyte-api/0.52.0 - . \ No newline at end of file +- [PyPI v0.52.0] https://pypi.org/project/airbyte-api/0.52.0 - . + +## 2024-09-25 00:17:36 +### Changes +Based on: +- OpenAPI Doc +- Speakeasy CLI 1.402.12 (2.422.15) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v0.52.1] . +### Releases +- [PyPI v0.52.1] https://pypi.org/project/airbyte-api/0.52.1 - . \ No newline at end of file diff --git a/docs/models/apiendpoint.md b/docs/models/apiendpoint.md new file mode 100644 index 00000000..56265eb3 --- /dev/null +++ b/docs/models/apiendpoint.md @@ -0,0 +1,17 @@ +# APIEndpoint + + +## Supported Types + +### `models.Basic` + +```python +value: models.Basic = /* values here */ +``` + +### `models.Enterprise` + +```python +value: models.Enterprise = /* values here */ +``` + diff --git a/docs/models/apikeysecret.md b/docs/models/apikeysecret.md index 89de7a8c..bc7318ba 100644 --- a/docs/models/apikeysecret.md +++ b/docs/models/apikeysecret.md @@ -5,8 +5,8 @@ Use a api key and secret combination to authenticate ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `api_key_id` | *str* | :heavy_check_mark: | The Key ID to used when accessing an enterprise Elasticsearch instance. | -| `api_key_secret` | *str* | :heavy_check_mark: | The secret associated with the API Key ID. | -| `method` | [models.DestinationElasticsearchMethod](../models/destinationelasticsearchmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | +| `api_key_id` | *str* | :heavy_check_mark: | The Key ID to used when accessing an enterprise Elasticsearch instance. | +| `api_key_secret` | *str* | :heavy_check_mark: | The secret associated with the API Key ID. 
| +| `method` | [models.DestinationElasticsearchSchemasMethod](../models/destinationelasticsearchschemasmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/apiserver.md b/docs/models/apiserver.md new file mode 100644 index 00000000..2a99fc2a --- /dev/null +++ b/docs/models/apiserver.md @@ -0,0 +1,9 @@ +# APIServer + + +## Values + +| Name | Value | +| ----- | ----- | +| `US` | us | +| `EU` | eu | \ No newline at end of file diff --git a/docs/models/appcues.md b/docs/models/appcues.md new file mode 100644 index 00000000..a08cc56b --- /dev/null +++ b/docs/models/appcues.md @@ -0,0 +1,8 @@ +# Appcues + + +## Values + +| Name | Value | +| --------- | --------- | +| `APPCUES` | appcues | \ No newline at end of file diff --git a/docs/models/appfigures.md b/docs/models/appfigures.md new file mode 100644 index 00000000..5f51212d --- /dev/null +++ b/docs/models/appfigures.md @@ -0,0 +1,8 @@ +# Appfigures + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `APPFIGURES` | appfigures | \ No newline at end of file diff --git a/docs/models/authenticationmethod.md b/docs/models/authenticationmethod.md index 9ffe7611..af737103 100644 --- a/docs/models/authenticationmethod.md +++ b/docs/models/authenticationmethod.md @@ -5,6 +5,12 @@ The type of authentication to be used ## Supported Types +### `models.NoneT` + +```python +value: models.NoneT = /* values here */ +``` + ### `models.APIKeySecret` ```python diff --git a/docs/models/authorizationtype.md b/docs/models/authorizationtype.md index e7c767cf..c4001dc7 100644 --- a/docs/models/authorizationtype.md +++ b/docs/models/authorizationtype.md @@ -5,10 +5,10 @@ Authorization type. ## Supported Types -### `models.NoneT` +### `models.DestinationMongodbNone` ```python -value: models.NoneT = /* values here */ +value: models.DestinationMongodbNone = /* values here */ ``` ### `models.LoginPassword` diff --git a/docs/models/avroformat.md b/docs/models/avroformat.md index 03d94531..ff263867 100644 --- a/docs/models/avroformat.md +++ b/docs/models/avroformat.md @@ -6,4 +6,4 @@ | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `double_as_string` | *Optional[bool]* | :heavy_minus_sign: | Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. 
| -| `filetype` | [Optional[models.SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype]](../models/sourceazureblobstorageschemasstreamsformatformatfiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `filetype` | [Optional[models.Filetype]](../models/filetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/baseurlprefix.md b/docs/models/baseurlprefix.md new file mode 100644 index 00000000..91df7bce --- /dev/null +++ b/docs/models/baseurlprefix.md @@ -0,0 +1,12 @@ +# BaseURLPrefix + +You can access our API through the following URLs - Standard API Usage (Use the default API URL - https://api.jotform.com), For EU (Use the EU API URL - https://eu-api.jotform.com), For HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com) + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `STANDARD` | Standard | +| `EU` | EU | +| `HIPAA` | HIPAA | \ No newline at end of file diff --git a/docs/models/basic.md b/docs/models/basic.md new file mode 100644 index 00000000..7749698f --- /dev/null +++ b/docs/models/basic.md @@ -0,0 +1,9 @@ +# Basic + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_endpoint` | [Optional[models.SourceJotformAPIEndpoint]](../models/sourcejotformapiendpoint.md) | :heavy_minus_sign: | N/A | +| `url_prefix` | [Optional[models.BaseURLPrefix]](../models/baseurlprefix.md) | :heavy_minus_sign: | You can access our API through the following URLs - Standard API Usage (Use the default API URL - https://api.jotform.com), For EU (Use the EU API URL - https://eu-api.jotform.com), For HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com) | \ No newline at end of file diff --git a/docs/models/sources3schemasformatfiletype.md b/docs/models/bitly.md similarity index 51% rename from docs/models/sources3schemasformatfiletype.md rename to docs/models/bitly.md index 6e89b817..76228a6a 100644 --- a/docs/models/sources3schemasformatfiletype.md +++ b/docs/models/bitly.md @@ -1,8 +1,8 @@ -# SourceS3SchemasFormatFiletype +# Bitly ## Values | Name | Value | | ------- | ------- | -| `JSONL` | jsonl | \ No newline at end of file +| `BITLY` | bitly | \ No newline at end of file diff --git a/docs/models/brevo.md b/docs/models/brevo.md new file mode 100644 index 00000000..75f5d262 --- /dev/null +++ b/docs/models/brevo.md @@ -0,0 +1,8 @@ +# Brevo + + +## Values + +| Name | Value | +| ------- | ------- | +| `BREVO` | brevo | \ No newline at end of file diff --git a/docs/models/buildkite.md b/docs/models/buildkite.md new file 
mode 100644 index 00000000..2e4922fd --- /dev/null +++ b/docs/models/buildkite.md @@ -0,0 +1,8 @@ +# Buildkite + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `BUILDKITE` | buildkite | \ No newline at end of file diff --git a/docs/models/buzzsprout.md b/docs/models/buzzsprout.md new file mode 100644 index 00000000..a43fc382 --- /dev/null +++ b/docs/models/buzzsprout.md @@ -0,0 +1,8 @@ +# Buzzsprout + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `BUZZSPROUT` | buzzsprout | \ No newline at end of file diff --git a/docs/models/canny.md b/docs/models/canny.md new file mode 100644 index 00000000..7f65b9f0 --- /dev/null +++ b/docs/models/canny.md @@ -0,0 +1,8 @@ +# Canny + + +## Values + +| Name | Value | +| ------- | ------- | +| `CANNY` | canny | \ No newline at end of file diff --git a/docs/models/chameleon.md b/docs/models/chameleon.md new file mode 100644 index 00000000..b0494ac4 --- /dev/null +++ b/docs/models/chameleon.md @@ -0,0 +1,8 @@ +# Chameleon + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `CHAMELEON` | chameleon | \ No newline at end of file diff --git a/docs/models/cimis.md b/docs/models/cimis.md new file mode 100644 index 00000000..0c9cb1dd --- /dev/null +++ b/docs/models/cimis.md @@ -0,0 +1,8 @@ +# Cimis + + +## Values + +| Name | Value | +| ------- | ------- | +| `CIMIS` | cimis | \ No newline at end of file diff --git a/docs/models/continuousfeed.md b/docs/models/continuousfeed.md deleted file mode 100644 index 84255bd8..00000000 --- a/docs/models/continuousfeed.md +++ /dev/null @@ -1,14 +0,0 @@ -# ContinuousFeed - - -## Fields - -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | -| `mock_catalog` | [models.MockCatalog](../models/mockcatalog.md) | :heavy_check_mark: | N/A | | -| `additional_properties` | Dict[str, *Any*] | :heavy_minus_sign: | N/A | {
"user": "charles"
} | -| `max_messages` | *Optional[int]* | :heavy_minus_sign: | Number of records to emit per stream. Min 1. Max 100 billion. | | -| `message_interval_ms` | *Optional[int]* | :heavy_minus_sign: | Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute). | | -| `seed` | *Optional[int]* | :heavy_minus_sign: | When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000]. | 42 | -| `source_type` | [Optional[models.E2eTestCloud]](../models/e2etestcloud.md) | :heavy_minus_sign: | N/A | | -| `type` | [Optional[models.Type]](../models/type.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/csvformat.md b/docs/models/csvformat.md index 5ae2ca00..af7eb90a 100644 --- a/docs/models/csvformat.md +++ b/docs/models/csvformat.md @@ -13,7 +13,6 @@ | `filetype` | [Optional[models.SourceAzureBlobStorageFiletype]](../models/sourceazureblobstoragefiletype.md) | :heavy_minus_sign: | N/A | | `header_definition` | [Optional[models.CSVHeaderDefinition]](../models/csvheaderdefinition.md) | :heavy_minus_sign: | How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. | | `ignore_errors_on_fields_mismatch` | *Optional[bool]* | :heavy_minus_sign: | Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. | -| `inference_type` | [Optional[models.InferenceType]](../models/inferencetype.md) | :heavy_minus_sign: | How to infer the types of the columns. If none, inference default to strings. | | `null_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. | | `quote_char` | *Optional[str]* | :heavy_minus_sign: | The character used for quoting CSV values. To disallow quoting, make this field blank. | | `skip_rows_after_header` | *Optional[int]* | :heavy_minus_sign: | The number of rows to skip after the header row. | diff --git a/docs/models/destinationclickhouse.md b/docs/models/destinationclickhouse.md index 92aaed7c..d52d9270 100644 --- a/docs/models/destinationclickhouse.md +++ b/docs/models/destinationclickhouse.md @@ -13,4 +13,5 @@ | `password` | *Optional[str]* | :heavy_minus_sign: | Password associated with the username. | | | `port` | *Optional[int]* | :heavy_minus_sign: | HTTP port of the database. | 8123 | | `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The schema to write raw tables into (default: airbyte_internal) | | +| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. | | | `tunnel_method` | [Optional[models.SSHTunnelMethod]](../models/sshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. 
| | \ No newline at end of file diff --git a/docs/models/destinationconfiguration.md b/docs/models/destinationconfiguration.md index f713e7cb..8627b8f1 100644 --- a/docs/models/destinationconfiguration.md +++ b/docs/models/destinationconfiguration.md @@ -53,12 +53,6 @@ value: models.DestinationConvex = /* values here */ value: models.DestinationDatabricks = /* values here */ ``` -### `models.DestinationDevNull` - -```python -value: models.DestinationDevNull = /* values here */ -``` - ### `models.DestinationDuckdb` ```python @@ -125,6 +119,12 @@ value: models.DestinationMysql = /* values here */ value: models.DestinationOracle = /* values here */ ``` +### `models.DestinationPgvector` + +```python +value: models.DestinationPgvector = /* values here */ +``` + ### `models.DestinationPinecone` ```python diff --git a/docs/models/destinationdevnull.md b/docs/models/destinationdevnull.md deleted file mode 100644 index f1246b16..00000000 --- a/docs/models/destinationdevnull.md +++ /dev/null @@ -1,9 +0,0 @@ -# DestinationDevNull - - -## Fields - -| Field | Type | Required | Description | -| ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | ------------------------------------------------------ | -| `test_destination` | [models.TestDestination](../models/testdestination.md) | :heavy_check_mark: | The type of destination to be used | -| `destination_type` | [models.DevNull](../models/devnull.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/destinationelasticsearch.md b/docs/models/destinationelasticsearch.md index b540f52e..1993f52a 100644 --- a/docs/models/destinationelasticsearch.md +++ b/docs/models/destinationelasticsearch.md @@ -9,4 +9,5 @@ | `authentication_method` | [Optional[models.AuthenticationMethod]](../models/authenticationmethod.md) | :heavy_minus_sign: | The type of authentication to be used | | `ca_certificate` | *Optional[str]* | :heavy_minus_sign: | CA certificate | | `destination_type` | [models.Elasticsearch](../models/elasticsearch.md) | :heavy_check_mark: | N/A | +| `tunnel_method` | [Optional[models.DestinationElasticsearchSSHTunnelMethod]](../models/destinationelasticsearchsshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | `upsert` | *Optional[bool]* | :heavy_minus_sign: | If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys. 
| \ No newline at end of file diff --git a/docs/models/destinationelasticsearchmethod.md b/docs/models/destinationelasticsearchmethod.md index b1cc2059..3b76c994 100644 --- a/docs/models/destinationelasticsearchmethod.md +++ b/docs/models/destinationelasticsearchmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| -------- | -------- | -| `SECRET` | secret | \ No newline at end of file +| Name | Value | +| ------ | ------ | +| `NONE` | none | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchnotunnel.md b/docs/models/destinationelasticsearchnotunnel.md new file mode 100644 index 00000000..33bd3b5e --- /dev/null +++ b/docs/models/destinationelasticsearchnotunnel.md @@ -0,0 +1,8 @@ +# DestinationElasticsearchNoTunnel + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------ | +| `tunnel_method` | [models.DestinationElasticsearchTunnelMethod](../models/destinationelasticsearchtunnelmethod.md) | :heavy_check_mark: | No ssh tunnel needed to connect to database | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchpasswordauthentication.md b/docs/models/destinationelasticsearchpasswordauthentication.md new file mode 100644 index 00000000..6956b228 --- /dev/null +++ b/docs/models/destinationelasticsearchpasswordauthentication.md @@ -0,0 +1,12 @@ +# DestinationElasticsearchPasswordAuthentication + + +## Fields + +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host | | +| `tunnel_user_password` | *str* | :heavy_check_mark: | OS-level password for logging into the jump server host | | +| `tunnel_method` | [models.DestinationElasticsearchSchemasTunnelMethodTunnelMethod](../models/destinationelasticsearchschemastunnelmethodtunnelmethod.md) | :heavy_check_mark: | Connect through a jump server tunnel host using username and password authentication | | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. 
| 22 | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchschemasauthenticationmethodmethod.md b/docs/models/destinationelasticsearchschemasauthenticationmethodmethod.md new file mode 100644 index 00000000..d8fa0857 --- /dev/null +++ b/docs/models/destinationelasticsearchschemasauthenticationmethodmethod.md @@ -0,0 +1,8 @@ +# DestinationElasticsearchSchemasAuthenticationMethodMethod + + +## Values + +| Name | Value | +| ------- | ------- | +| `BASIC` | basic | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchschemasmethod.md b/docs/models/destinationelasticsearchschemasmethod.md index 721349b5..a8912974 100644 --- a/docs/models/destinationelasticsearchschemasmethod.md +++ b/docs/models/destinationelasticsearchschemasmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------- | ------- | -| `BASIC` | basic | \ No newline at end of file +| Name | Value | +| -------- | -------- | +| `SECRET` | secret | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchschemastunnelmethod.md b/docs/models/destinationelasticsearchschemastunnelmethod.md new file mode 100644 index 00000000..f2287c1c --- /dev/null +++ b/docs/models/destinationelasticsearchschemastunnelmethod.md @@ -0,0 +1,10 @@ +# DestinationElasticsearchSchemasTunnelMethod + +Connect through a jump server tunnel host using username and ssh key + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `SSH_KEY_AUTH` | SSH_KEY_AUTH | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchschemastunnelmethodtunnelmethod.md b/docs/models/destinationelasticsearchschemastunnelmethodtunnelmethod.md new file mode 100644 index 00000000..917b6782 --- /dev/null +++ b/docs/models/destinationelasticsearchschemastunnelmethodtunnelmethod.md @@ -0,0 +1,10 @@ +# DestinationElasticsearchSchemasTunnelMethodTunnelMethod + +Connect through a jump server tunnel host using username and password authentication + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `SSH_PASSWORD_AUTH` | SSH_PASSWORD_AUTH | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchsshkeyauthentication.md b/docs/models/destinationelasticsearchsshkeyauthentication.md new file mode 100644 index 00000000..a94bd4cc --- /dev/null +++ b/docs/models/destinationelasticsearchsshkeyauthentication.md @@ -0,0 +1,12 @@ +# DestinationElasticsearchSSHKeyAuthentication + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | +| `ssh_key` | *str* | :heavy_check_mark: | OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa ) | | +| `tunnel_host` | *str* | :heavy_check_mark: | Hostname of the jump server host that allows inbound ssh tunnel. | | +| `tunnel_user` | *str* | :heavy_check_mark: | OS-level username for logging into the jump server host. 
| | +| `tunnel_method` | [models.DestinationElasticsearchSchemasTunnelMethod](../models/destinationelasticsearchschemastunnelmethod.md) | :heavy_check_mark: | Connect through a jump server tunnel host using username and ssh key | | +| `tunnel_port` | *Optional[int]* | :heavy_minus_sign: | Port on the proxy/jump server that accepts inbound ssh connections. | 22 | \ No newline at end of file diff --git a/docs/models/destinationelasticsearchsshtunnelmethod.md b/docs/models/destinationelasticsearchsshtunnelmethod.md new file mode 100644 index 00000000..23edec9b --- /dev/null +++ b/docs/models/destinationelasticsearchsshtunnelmethod.md @@ -0,0 +1,25 @@ +# DestinationElasticsearchSSHTunnelMethod + +Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. + + +## Supported Types + +### `models.DestinationElasticsearchNoTunnel` + +```python +value: models.DestinationElasticsearchNoTunnel = /* values here */ +``` + +### `models.DestinationElasticsearchSSHKeyAuthentication` + +```python +value: models.DestinationElasticsearchSSHKeyAuthentication = /* values here */ +``` + +### `models.DestinationElasticsearchPasswordAuthentication` + +```python +value: models.DestinationElasticsearchPasswordAuthentication = /* values here */ +``` + diff --git a/docs/models/destinationelasticsearchtunnelmethod.md b/docs/models/destinationelasticsearchtunnelmethod.md new file mode 100644 index 00000000..15d6f451 --- /dev/null +++ b/docs/models/destinationelasticsearchtunnelmethod.md @@ -0,0 +1,10 @@ +# DestinationElasticsearchTunnelMethod + +No ssh tunnel needed to connect to database + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `NO_TUNNEL` | NO_TUNNEL | \ No newline at end of file diff --git a/docs/models/destinationmongodbnone.md b/docs/models/destinationmongodbnone.md new file mode 100644 index 00000000..a1a7e08b --- /dev/null +++ b/docs/models/destinationmongodbnone.md @@ -0,0 +1,10 @@ +# DestinationMongodbNone + +None. 
+ + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| `authorization` | [models.DestinationMongodbSchemasAuthorization](../models/destinationmongodbschemasauthorization.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/destinationmssqlschemassslmethod.md b/docs/models/destinationmssqlschemassslmethod.md index c1b7d524..d28b0cb1 100644 --- a/docs/models/destinationmssqlschemassslmethod.md +++ b/docs/models/destinationmssqlschemassslmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------------------ | ------------------------------ | -| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file +| Name | Value | +| ------------------------------------ | ------------------------------------ | +| `ENCRYPTED_TRUST_SERVER_CERTIFICATE` | encrypted_trust_server_certificate | \ No newline at end of file diff --git a/docs/models/destinationmssqlschemassslmethodsslmethod.md b/docs/models/destinationmssqlschemassslmethodsslmethod.md new file mode 100644 index 00000000..35e48db6 --- /dev/null +++ b/docs/models/destinationmssqlschemassslmethodsslmethod.md @@ -0,0 +1,8 @@ +# DestinationMssqlSchemasSslMethodSslMethod + + +## Values + +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file diff --git a/docs/models/destinationmssqlsslmethod.md b/docs/models/destinationmssqlsslmethod.md index c7c6ee30..9d6bfded 100644 --- a/docs/models/destinationmssqlsslmethod.md +++ b/docs/models/destinationmssqlsslmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------------------------------ | ------------------------------------ | -| `ENCRYPTED_TRUST_SERVER_CERTIFICATE` | encrypted_trust_server_certificate | \ No newline at end of file +| Name | Value | +| ------------- | ------------- | +| `UNENCRYPTED` | unencrypted | \ No newline at end of file diff --git a/docs/models/destinationmysql.md b/docs/models/destinationmysql.md index 88057cb3..22857530 100644 --- a/docs/models/destinationmysql.md +++ b/docs/models/destinationmysql.md @@ -14,4 +14,5 @@ | `password` | *Optional[str]* | :heavy_minus_sign: | Password associated with the username. | | | `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database. | 3306 | | `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The database to write raw tables into | | +| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. | | | `tunnel_method` | [Optional[models.DestinationMysqlSSHTunnelMethod]](../models/destinationmysqlsshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. 
| | \ No newline at end of file diff --git a/docs/models/destinationoracle.md b/docs/models/destinationoracle.md index bf1e8da9..c4f09357 100644 --- a/docs/models/destinationoracle.md +++ b/docs/models/destinationoracle.md @@ -9,6 +9,7 @@ | `sid` | *str* | :heavy_check_mark: | The System Identifier uniquely distinguishes the instance from any other instance on the same computer. | | | `username` | *str* | :heavy_check_mark: | The username to access the database. This user must have CREATE USER privileges in the database. | | | `destination_type` | [models.Oracle](../models/oracle.md) | :heavy_check_mark: | N/A | | +| `encryption` | [Optional[models.Encryption]](../models/encryption.md) | :heavy_minus_sign: | The encryption method which is used when communicating with the database. | | | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). | | | `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. | | | `port` | *Optional[int]* | :heavy_minus_sign: | The port of the database. | 1521 | diff --git a/docs/models/destinationoracleencryptionmethod.md b/docs/models/destinationoracleencryptionmethod.md new file mode 100644 index 00000000..c1886c79 --- /dev/null +++ b/docs/models/destinationoracleencryptionmethod.md @@ -0,0 +1,8 @@ +# DestinationOracleEncryptionMethod + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `CLIENT_NNE` | client_nne | \ No newline at end of file diff --git a/docs/models/destinationoracleschemasencryptionmethod.md b/docs/models/destinationoracleschemasencryptionmethod.md new file mode 100644 index 00000000..f543de02 --- /dev/null +++ b/docs/models/destinationoracleschemasencryptionmethod.md @@ -0,0 +1,8 @@ +# DestinationOracleSchemasEncryptionMethod + + +## Values + +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file diff --git a/docs/models/destinationoracleunencrypted.md b/docs/models/destinationoracleunencrypted.md new file mode 100644 index 00000000..d30069d2 --- /dev/null +++ b/docs/models/destinationoracleunencrypted.md @@ -0,0 +1,10 @@ +# DestinationOracleUnencrypted + +Data transfer will not be encrypted. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | ------------------------------------------------------------------ | +| `encryption_method` | [Optional[models.EncryptionMethod]](../models/encryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/singleschema.md b/docs/models/destinationpgvector.md similarity index 54% rename from docs/models/singleschema.md rename to docs/models/destinationpgvector.md index 5a38f24d..f5dc4e2c 100644 --- a/docs/models/singleschema.md +++ b/docs/models/destinationpgvector.md @@ -1,13 +1,23 @@ -# SingleSchema +# DestinationPgvector -A catalog with one or multiple streams that share the same schema. +The configuration model for the Vector DB based destinations. 
This model is used to generate the UI for the destination configuration, +as well as to provide type safety for the configuration passed to the destination. + +The configuration model is composed of four parts: +* Processing configuration +* Embedding configuration +* Indexing configuration +* Advanced configuration + +Processing, embedding and advanced configuration are provided by this base class, while the indexing configuration is provided by the destination connector in the sub class. ## Fields | Field | Type | Required | Description | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `stream_duplication` | *Optional[int]* | :heavy_minus_sign: | Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is "ds", the duplicated streams will be "ds_0", "ds_1", etc. | -| `stream_name` | *Optional[str]* | :heavy_minus_sign: | Name of the data stream. | -| `stream_schema` | *Optional[str]* | :heavy_minus_sign: | A Json schema for the stream. The schema should be compatible with draft-07. See this doc for examples. | -| `type` | [Optional[models.SourceE2eTestCloudSchemasType]](../models/sourcee2etestcloudschemastype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `embedding` | [models.DestinationPgvectorEmbedding](../models/destinationpgvectorembedding.md) | :heavy_check_mark: | Embedding configuration | +| `indexing` | [models.PostgresConnection](../models/postgresconnection.md) | :heavy_check_mark: | Postgres can be used to store vector data and retrieve embeddings. | +| `processing` | [models.DestinationPgvectorProcessingConfigModel](../models/destinationpgvectorprocessingconfigmodel.md) | :heavy_check_mark: | N/A | +| `destination_type` | [models.Pgvector](../models/pgvector.md) | :heavy_check_mark: | N/A | +| `omit_raw_text` | *Optional[bool]* | :heavy_minus_sign: | Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. | \ No newline at end of file diff --git a/docs/models/destinationpgvectorazureopenai.md b/docs/models/destinationpgvectorazureopenai.md new file mode 100644 index 00000000..fe2509a8 --- /dev/null +++ b/docs/models/destinationpgvectorazureopenai.md @@ -0,0 +1,13 @@ +# DestinationPgvectorAzureOpenAI + +Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+ + +## Fields + +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | +| `api_base` | *str* | :heavy_check_mark: | The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | https://your-resource-name.openai.azure.com | +| `deployment` | *str* | :heavy_check_mark: | The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | your-resource-name | +| `openai_key` | *str* | :heavy_check_mark: | The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource | | +| `mode` | [Optional[models.DestinationPgvectorSchemasEmbeddingEmbeddingMode]](../models/destinationpgvectorschemasembeddingembeddingmode.md) | :heavy_minus_sign: | N/A | | \ No newline at end of file diff --git a/docs/models/destinationpgvectorbymarkdownheader.md b/docs/models/destinationpgvectorbymarkdownheader.md new file mode 100644 index 00000000..227100c9 --- /dev/null +++ b/docs/models/destinationpgvectorbymarkdownheader.md @@ -0,0 +1,11 @@ +# DestinationPgvectorByMarkdownHeader + +Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | +| `mode` | [Optional[models.DestinationPgvectorSchemasProcessingTextSplitterMode]](../models/destinationpgvectorschemasprocessingtextsplittermode.md) | :heavy_minus_sign: | N/A | +| `split_level` | *Optional[int]* | :heavy_minus_sign: | Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points | \ No newline at end of file diff --git a/docs/models/destinationpgvectorbyprogramminglanguage.md b/docs/models/destinationpgvectorbyprogramminglanguage.md new file mode 100644 index 00000000..e6364cc0 --- /dev/null +++ b/docs/models/destinationpgvectorbyprogramminglanguage.md @@ -0,0 +1,11 @@ +# DestinationPgvectorByProgrammingLanguage + +Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `language` | [models.DestinationPgvectorLanguage](../models/destinationpgvectorlanguage.md) | :heavy_check_mark: | Split code in suitable places based on the programming language | +| `mode` | [Optional[models.DestinationPgvectorSchemasProcessingTextSplitterTextSplitterMode]](../models/destinationpgvectorschemasprocessingtextsplittertextsplittermode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationpgvectorbyseparator.md b/docs/models/destinationpgvectorbyseparator.md new file mode 100644 index 00000000..16d44db0 --- /dev/null +++ b/docs/models/destinationpgvectorbyseparator.md @@ -0,0 +1,12 @@ +# DestinationPgvectorBySeparator + +Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `keep_separator` | *Optional[bool]* | :heavy_minus_sign: | Whether to keep the separator in the resulting chunks | +| `mode` | [Optional[models.DestinationPgvectorSchemasProcessingMode]](../models/destinationpgvectorschemasprocessingmode.md) | :heavy_minus_sign: | N/A | +| `separators` | List[*str*] | :heavy_minus_sign: | List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n". | \ No newline at end of file diff --git a/docs/models/destinationpgvectorcohere.md b/docs/models/destinationpgvectorcohere.md new file mode 100644 index 00000000..b1d3defd --- /dev/null +++ b/docs/models/destinationpgvectorcohere.md @@ -0,0 +1,11 @@ +# DestinationPgvectorCohere + +Use the Cohere API to embed text. 
+ + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `cohere_key` | *str* | :heavy_check_mark: | N/A | +| `mode` | [Optional[models.DestinationPgvectorSchemasMode]](../models/destinationpgvectorschemasmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationpgvectorcredentials.md b/docs/models/destinationpgvectorcredentials.md new file mode 100644 index 00000000..4a20b054 --- /dev/null +++ b/docs/models/destinationpgvectorcredentials.md @@ -0,0 +1,8 @@ +# DestinationPgvectorCredentials + + +## Fields + +| Field | Type | Required | Description | Example | +| --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | +| `password` | *str* | :heavy_check_mark: | Enter the password you want to use to access the database | AIRBYTE_PASSWORD | \ No newline at end of file diff --git a/docs/models/destinationpgvectorembedding.md b/docs/models/destinationpgvectorembedding.md new file mode 100644 index 00000000..4a0b4b21 --- /dev/null +++ b/docs/models/destinationpgvectorembedding.md @@ -0,0 +1,37 @@ +# DestinationPgvectorEmbedding + +Embedding configuration + + +## Supported Types + +### `models.DestinationPgvectorOpenAI` + +```python +value: models.DestinationPgvectorOpenAI = /* values here */ +``` + +### `models.DestinationPgvectorCohere` + +```python +value: models.DestinationPgvectorCohere = /* values here */ +``` + +### `models.DestinationPgvectorFake` + +```python +value: models.DestinationPgvectorFake = /* values here */ +``` + +### `models.DestinationPgvectorAzureOpenAI` + +```python +value: models.DestinationPgvectorAzureOpenAI = /* values here */ +``` + +### `models.DestinationPgvectorOpenAICompatible` + +```python +value: models.DestinationPgvectorOpenAICompatible = /* values here */ +``` + diff --git a/docs/models/destinationpgvectorfake.md b/docs/models/destinationpgvectorfake.md new file mode 100644 index 00000000..3e9d0f7e --- /dev/null +++ b/docs/models/destinationpgvectorfake.md @@ -0,0 +1,10 @@ +# DestinationPgvectorFake + +Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. 
+ + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------ | +| `mode` | [Optional[models.DestinationPgvectorSchemasEmbeddingMode]](../models/destinationpgvectorschemasembeddingmode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationpgvectorfieldnamemappingconfigmodel.md b/docs/models/destinationpgvectorfieldnamemappingconfigmodel.md new file mode 100644 index 00000000..a65598b0 --- /dev/null +++ b/docs/models/destinationpgvectorfieldnamemappingconfigmodel.md @@ -0,0 +1,9 @@ +# DestinationPgvectorFieldNameMappingConfigModel + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | +| `from_field` | *str* | :heavy_check_mark: | The field name in the source | +| `to_field` | *str* | :heavy_check_mark: | The field name to use in the destination | \ No newline at end of file diff --git a/docs/models/destinationpgvectorlanguage.md b/docs/models/destinationpgvectorlanguage.md new file mode 100644 index 00000000..deebb646 --- /dev/null +++ b/docs/models/destinationpgvectorlanguage.md @@ -0,0 +1,25 @@ +# DestinationPgvectorLanguage + +Split code in suitable places based on the programming language + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `CPP` | cpp | +| `GO` | go | +| `JAVA` | java | +| `JS` | js | +| `PHP` | php | +| `PROTO` | proto | +| `PYTHON` | python | +| `RST` | rst | +| `RUBY` | ruby | +| `RUST` | rust | +| `SCALA` | scala | +| `SWIFT` | swift | +| `MARKDOWN` | markdown | +| `LATEX` | latex | +| `HTML` | html | +| `SOL` | sol | \ No newline at end of file diff --git a/docs/models/destinationpgvectormode.md b/docs/models/destinationpgvectormode.md new file mode 100644 index 00000000..7bfc7ae1 --- /dev/null +++ b/docs/models/destinationpgvectormode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorMode + + +## Values + +| Name | Value | +| -------- | -------- | +| `OPENAI` | openai | \ No newline at end of file diff --git a/docs/models/destinationpgvectoropenai.md b/docs/models/destinationpgvectoropenai.md new file mode 100644 index 00000000..fce5e0f6 --- /dev/null +++ b/docs/models/destinationpgvectoropenai.md @@ -0,0 +1,11 @@ +# DestinationPgvectorOpenAI + +Use the OpenAI API to embed text. This option uses the text-embedding-ada-002 model with 1536 embedding dimensions. 
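A minimal sketch (same assumed import; the key value is a placeholder):

```python
from airbyte_api import models  # assumed import path for this SDK

# Placeholder key; openai_key is the only required field.
embedding = models.DestinationPgvectorOpenAI(openai_key="sk-...")
```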
+ + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `openai_key` | *str* | :heavy_check_mark: | N/A | +| `mode` | [Optional[models.DestinationPgvectorMode]](../models/destinationpgvectormode.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/destinationpgvectoropenaicompatible.md b/docs/models/destinationpgvectoropenaicompatible.md new file mode 100644 index 00000000..9f624fe8 --- /dev/null +++ b/docs/models/destinationpgvectoropenaicompatible.md @@ -0,0 +1,14 @@ +# DestinationPgvectorOpenAICompatible + +Use a service that's compatible with the OpenAI API to embed text. + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | +| `base_url` | *str* | :heavy_check_mark: | The base URL for your OpenAI-compatible service | https://your-service-name.com | +| `dimensions` | *int* | :heavy_check_mark: | The number of dimensions the embedding model is generating | 1536 | +| `api_key` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `mode` | [Optional[models.DestinationPgvectorSchemasEmbeddingEmbedding5Mode]](../models/destinationpgvectorschemasembeddingembedding5mode.md) | :heavy_minus_sign: | N/A | | +| `model_name` | *Optional[str]* | :heavy_minus_sign: | The name of the model to use for embedding | text-embedding-ada-002 | \ No newline at end of file diff --git a/docs/models/csv.md b/docs/models/destinationpgvectorprocessingconfigmodel.md similarity index 76% rename from docs/models/csv.md rename to docs/models/destinationpgvectorprocessingconfigmodel.md index f7c8a589..b60bc4a2 100644 --- a/docs/models/csv.md +++ b/docs/models/destinationpgvectorprocessingconfigmodel.md @@ -1,20 +1,13 @@ -# Csv - -This connector utilises PyArrow (Apache Arrow) for CSV parsing. 
+# DestinationPgvectorProcessingConfigModel ## Fields -| Field | Type | Required | Description | Example | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `additional_reader_options` | *Optional[str]* | :heavy_minus_sign: | Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems. | {"timestamp_parsers": ["%m/%d/%Y %H:%M", "%Y/%m/%d %H:%M"], "strings_can_be_null": true, "null_values": ["NA", "NULL"]} | -| `advanced_options` | *Optional[str]* | :heavy_minus_sign: | Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above. | {"column_names": ["column1", "column2"]} | -| `block_size` | *Optional[int]* | :heavy_minus_sign: | The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors. | | -| `delimiter` | *Optional[str]* | :heavy_minus_sign: | The character delimiting individual cells in the CSV data. This may only be a 1-character string. 
For tab-delimited data enter '\t'. | | -| `double_quote` | *Optional[bool]* | :heavy_minus_sign: | Whether two quotes in a quoted CSV value denote a single quote in the data. | | -| `encoding` | *Optional[str]* | :heavy_minus_sign: | The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. | | -| `escape_char` | *Optional[str]* | :heavy_minus_sign: | The character used for escaping special characters. To disallow escaping, leave this field blank. | | -| `filetype` | [Optional[models.Filetype]](../models/filetype.md) | :heavy_minus_sign: | N/A | | -| `infer_datatypes` | *Optional[bool]* | :heavy_minus_sign: | Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings | | -| `newlines_in_values` | *Optional[bool]* | :heavy_minus_sign: | Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False. | | -| `quote_char` | *Optional[str]* | :heavy_minus_sign: | The character used for quoting CSV values. To disallow quoting, make this field blank. | | \ No newline at end of file +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `chunk_size` | *int* | :heavy_check_mark: | Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM) | | +| `chunk_overlap` | *Optional[int]* | :heavy_minus_sign: | Size of overlap between chunks in tokens to store in vector store to better capture relevant context | | +| `field_name_mappings` | List[[models.DestinationPgvectorFieldNameMappingConfigModel](../models/destinationpgvectorfieldnamemappingconfigmodel.md)] | :heavy_minus_sign: | List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation. | | +| `metadata_fields` | List[*str*] | :heavy_minus_sign: | List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. | age | +| `text_fields` | List[*str*] | :heavy_minus_sign: | List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. | text | +| `text_splitter` | [Optional[models.DestinationPgvectorTextSplitter]](../models/destinationpgvectortextsplitter.md) | :heavy_minus_sign: | Split text fields into chunks based on the specified method. 
| | \ No newline at end of file diff --git a/docs/models/destinationpgvectorschemasembeddingembedding5mode.md b/docs/models/destinationpgvectorschemasembeddingembedding5mode.md new file mode 100644 index 00000000..d80e0f7d --- /dev/null +++ b/docs/models/destinationpgvectorschemasembeddingembedding5mode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorSchemasEmbeddingEmbedding5Mode + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `OPENAI_COMPATIBLE` | openai_compatible | \ No newline at end of file diff --git a/docs/models/destinationpgvectorschemasembeddingembeddingmode.md b/docs/models/destinationpgvectorschemasembeddingembeddingmode.md new file mode 100644 index 00000000..5e0e13a9 --- /dev/null +++ b/docs/models/destinationpgvectorschemasembeddingembeddingmode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorSchemasEmbeddingEmbeddingMode + + +## Values + +| Name | Value | +| -------------- | -------------- | +| `AZURE_OPENAI` | azure_openai | \ No newline at end of file diff --git a/docs/models/destinationpgvectorschemasembeddingmode.md b/docs/models/destinationpgvectorschemasembeddingmode.md new file mode 100644 index 00000000..466cd63a --- /dev/null +++ b/docs/models/destinationpgvectorschemasembeddingmode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorSchemasEmbeddingMode + + +## Values + +| Name | Value | +| ------ | ------ | +| `FAKE` | fake | \ No newline at end of file diff --git a/docs/models/destinationpgvectorschemasmode.md b/docs/models/destinationpgvectorschemasmode.md new file mode 100644 index 00000000..3b52f46e --- /dev/null +++ b/docs/models/destinationpgvectorschemasmode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorSchemasMode + + +## Values + +| Name | Value | +| -------- | -------- | +| `COHERE` | cohere | \ No newline at end of file diff --git a/docs/models/destinationpgvectorschemasprocessingmode.md b/docs/models/destinationpgvectorschemasprocessingmode.md new file mode 100644 index 00000000..1dc1fb35 --- /dev/null +++ b/docs/models/destinationpgvectorschemasprocessingmode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorSchemasProcessingMode + + +## Values + +| Name | Value | +| ----------- | ----------- | +| `SEPARATOR` | separator | \ No newline at end of file diff --git a/docs/models/destinationpgvectorschemasprocessingtextsplittermode.md b/docs/models/destinationpgvectorschemasprocessingtextsplittermode.md new file mode 100644 index 00000000..f0404679 --- /dev/null +++ b/docs/models/destinationpgvectorschemasprocessingtextsplittermode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorSchemasProcessingTextSplitterMode + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `MARKDOWN` | markdown | \ No newline at end of file diff --git a/docs/models/destinationpgvectorschemasprocessingtextsplittertextsplittermode.md b/docs/models/destinationpgvectorschemasprocessingtextsplittertextsplittermode.md new file mode 100644 index 00000000..f254d13f --- /dev/null +++ b/docs/models/destinationpgvectorschemasprocessingtextsplittertextsplittermode.md @@ -0,0 +1,8 @@ +# DestinationPgvectorSchemasProcessingTextSplitterTextSplitterMode + + +## Values + +| Name | Value | +| ------ | ------ | +| `CODE` | code | \ No newline at end of file diff --git a/docs/models/destinationpgvectortextsplitter.md b/docs/models/destinationpgvectortextsplitter.md new file mode 100644 index 00000000..eee8b2d6 --- /dev/null +++ b/docs/models/destinationpgvectortextsplitter.md @@ -0,0 +1,25 @@ +# DestinationPgvectorTextSplitter + +Split text fields into chunks based on the specified method. 
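Combining the processing model above with one of the splitters below (a sketch with illustrative chunk sizes and field names):

```python
from airbyte_api import models  # assumed import path for this SDK

# 512-token chunks with 50 tokens of overlap, split on blank lines.
# Per the separator docs, each separator is written as a quoted string, e.g. "\n\n".
processing = models.DestinationPgvectorProcessingConfigModel(
    chunk_size=512,
    chunk_overlap=50,
    text_fields=["text"],      # embed only the `text` field
    metadata_fields=["age"],   # carry `age` along as metadata
    text_splitter=models.DestinationPgvectorBySeparator(
        separators=['"\\n\\n"'],
        keep_separator=False,
    ),
)
```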
+ + +## Supported Types + +### `models.DestinationPgvectorBySeparator` + +```python +value: models.DestinationPgvectorBySeparator = /* values here */ +``` + +### `models.DestinationPgvectorByMarkdownHeader` + +```python +value: models.DestinationPgvectorByMarkdownHeader = /* values here */ +``` + +### `models.DestinationPgvectorByProgrammingLanguage` + +```python +value: models.DestinationPgvectorByProgrammingLanguage = /* values here */ +``` + diff --git a/docs/models/destinationpostgres.md b/docs/models/destinationpostgres.md index fa5542e2..4ef23a97 100644 --- a/docs/models/destinationpostgres.md +++ b/docs/models/destinationpostgres.md @@ -16,5 +16,6 @@ | `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database. | 5432 | | `raw_data_schema` | *Optional[str]* | :heavy_minus_sign: | The schema to write raw tables into | | | `schema` | *Optional[str]* | :heavy_minus_sign: | The default schema tables are written to if the source does not specify a namespace. The usual value for this field is "public". | public | +| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. When activating SSL, please select one of the connection modes. | | | `ssl_mode` | [Optional[models.SSLModes]](../models/sslmodes.md) | :heavy_minus_sign: | SSL connection modes.
disable - Choose this mode to disable encryption of communication between Airbyte and destination database
allow - Choose this mode to enable encryption only when required by the source database
prefer - Choose this mode to allow unencrypted connection only if the source database does not support encryption
require - Choose this mode to always require encryption. If the source database server does not support encryption, connection will fail
verify-ca - Choose this mode to always require encryption and to verify that the source database server has a valid SSL certificate
verify-full - This is the most secure mode. Choose this mode to always require encryption and to verify the identity of the source database server

See more information - in the docs. | | | `tunnel_method` | [Optional[models.DestinationPostgresSSHTunnelMethod]](../models/destinationpostgressshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | \ No newline at end of file diff --git a/docs/models/dimensionsfilter.md b/docs/models/dimensionsfilter.md index 8ceff45a..ed62d6cc 100644 --- a/docs/models/dimensionsfilter.md +++ b/docs/models/dimensionsfilter.md @@ -23,9 +23,9 @@ value: models.OrGroup = /* values here */ value: models.NotExpression = /* values here */ ``` -### `models.Filter` +### `models.SourceGoogleAnalyticsDataAPIFilter` ```python -value: models.Filter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIFilter = /* values here */ ``` diff --git a/docs/models/e2etestcloud.md b/docs/models/e2etestcloud.md deleted file mode 100644 index 479d1fbc..00000000 --- a/docs/models/e2etestcloud.md +++ /dev/null @@ -1,8 +0,0 @@ -# E2eTestCloud - - -## Values - -| Name | Value | -| ---------------- | ---------------- | -| `E2E_TEST_CLOUD` | e2e-test-cloud | \ No newline at end of file diff --git a/docs/models/encryptedtrustservercertificate.md b/docs/models/encryptedtrustservercertificate.md index 9d21029e..588f841b 100644 --- a/docs/models/encryptedtrustservercertificate.md +++ b/docs/models/encryptedtrustservercertificate.md @@ -5,6 +5,6 @@ Use the certificate provided by the server without verification. (For testing pu ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | -| `ssl_method` | [Optional[models.DestinationMssqlSslMethod]](../models/destinationmssqlsslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | +| `ssl_method` | [Optional[models.DestinationMssqlSchemasSslMethod]](../models/destinationmssqlschemassslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/encryptedverifycertificate.md b/docs/models/encryptedverifycertificate.md index afd3f3e2..7b93ad84 100644 --- a/docs/models/encryptedverifycertificate.md +++ b/docs/models/encryptedverifycertificate.md @@ -8,4 +8,4 @@ Verify and use the certificate provided by the server. 
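A small sketch (assuming this SDK's usual `from airbyte_api import models` import) that requires a verified certificate and pins the expected host name:

```python
from airbyte_api import models  # assumed import path for this SDK

# Hypothetical host name; it must match the certificate's subject property.
ssl_method = models.EncryptedVerifyCertificate(
    host_name_in_certificate="db.example.com",
)
```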
| Field | Type | Required | Description | | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | | `host_name_in_certificate` | *Optional[str]* | :heavy_minus_sign: | Specifies the host name of the server. The value of this property must match the subject property of the certificate. | -| `ssl_method` | [Optional[models.DestinationMssqlSchemasSslMethod]](../models/destinationmssqlschemassslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `ssl_method` | [Optional[models.DestinationMssqlSchemasSslMethodSslMethod]](../models/destinationmssqlschemassslmethodsslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/encryption.md b/docs/models/encryption.md index 23121bd8..9156def2 100644 --- a/docs/models/encryption.md +++ b/docs/models/encryption.md @@ -1,10 +1,16 @@ # Encryption -The encryption method with is used when communicating with the database. +The encryption method which is used when communicating with the database. ## Supported Types +### `models.DestinationOracleUnencrypted` + +```python +value: models.DestinationOracleUnencrypted = /* values here */ +``` + ### `models.NativeNetworkEncryptionNNE` ```python diff --git a/docs/models/encryptionalgorithm.md b/docs/models/encryptionalgorithm.md index 440df940..de3411a5 100644 --- a/docs/models/encryptionalgorithm.md +++ b/docs/models/encryptionalgorithm.md @@ -1,6 +1,6 @@ # EncryptionAlgorithm -This parameter defines what encryption algorithm is used. +This parameter defines the database encryption algorithm. 
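For example, NNE can be pinned to a specific algorithm (a sketch; `AES256` is an illustrative member name, check the generated enum for the exact values):

```python
from airbyte_api import models  # assumed import path for this SDK

# Oracle native network encryption with an explicit, illustrative algorithm choice.
encryption = models.NativeNetworkEncryptionNNE(
    encryption_algorithm=models.EncryptionAlgorithm.AES256,
)
```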
## Values diff --git a/docs/models/encryptionmethod.md b/docs/models/encryptionmethod.md index cef12038..3fbd715c 100644 --- a/docs/models/encryptionmethod.md +++ b/docs/models/encryptionmethod.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------------ | ------------ | -| `CLIENT_NNE` | client_nne | \ No newline at end of file +| Name | Value | +| ------------- | ------------- | +| `UNENCRYPTED` | unencrypted | \ No newline at end of file diff --git a/docs/models/enterprise.md b/docs/models/enterprise.md new file mode 100644 index 00000000..7ee36ebb --- /dev/null +++ b/docs/models/enterprise.md @@ -0,0 +1,9 @@ +# Enterprise + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | +| `enterprise_url` | *str* | :heavy_check_mark: | Upgrade to Enterprise to make your API url your-domain.com/API or subdomain.jotform.com/API instead of api.jotform.com | +| `api_endpoint` | [Optional[models.SourceJotformSchemasAPIEndpoint]](../models/sourcejotformschemasapiendpoint.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/expression.md b/docs/models/expression.md index 31bf85b8..1de417f5 100644 --- a/docs/models/expression.md +++ b/docs/models/expression.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `field_name` | *str* | :heavy_check_mark: | N/A | -| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `field_name` | *str* | :heavy_check_mark: | N/A | +| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/ezofficeinventory.md b/docs/models/ezofficeinventory.md new file mode 100644 index 00000000..0b0e6921 --- /dev/null +++ b/docs/models/ezofficeinventory.md @@ -0,0 +1,8 @@ +# Ezofficeinventory + + +## Values + +| Name | Value | +| ------------------- | ------------------- | +| `EZOFFICEINVENTORY` | ezofficeinventory | \ No newline at end of file diff --git a/docs/models/filebasedstreamconfig.md b/docs/models/filebasedstreamconfig.md index c2b0971e..dbe35d48 100644 --- a/docs/models/filebasedstreamconfig.md +++ b/docs/models/filebasedstreamconfig.md @@ -10,7 +10,5 @@ | `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. | | `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. | | `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. | -| `legacy_prefix` | *Optional[str]* | :heavy_minus_sign: | The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. | -| `primary_key` | *Optional[str]* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key. | | `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. | | `validation_policy` | [Optional[models.ValidationPolicy]](../models/validationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. | \ No newline at end of file diff --git a/docs/models/filetype.md b/docs/models/filetype.md index 12eaecf8..aa6fe4cd 100644 --- a/docs/models/filetype.md +++ b/docs/models/filetype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ----- | ----- | -| `CSV` | csv | \ No newline at end of file +| Name | Value | +| ------ | ------ | +| `AVRO` | avro | \ No newline at end of file diff --git a/docs/models/filter_.md b/docs/models/filter_.md index e9cec954..709c43d5 100644 --- a/docs/models/filter_.md +++ b/docs/models/filter_.md @@ -1,12 +1,12 @@ # Filter -A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all dimensions. 
+Filter for use in the `segments_experiences` stream -## Fields +## Values -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `field_name` | *str* | :heavy_check_mark: | N/A | -| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasFilter](../models/sourcegoogleanalyticsdataapischemasfilter.md) | :heavy_check_mark: | N/A | -| `filter_type` | [Optional[models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType]](../models/sourcegoogleanalyticsdataapischemascustomreportsarrayfiltertype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Name | Value | +| ---------- | ---------- | +| `TOUR` | tour | +| `SURVEY` | survey | +| `LAUNCHER` | launcher | \ No newline at end of file diff --git a/docs/models/front.md b/docs/models/front.md new file mode 100644 index 00000000..390c339e --- /dev/null +++ b/docs/models/front.md @@ -0,0 +1,8 @@ +# Front + + +## Values + +| Name | Value | +| ------- | ------- | +| `FRONT` | front | \ No newline at end of file diff --git a/docs/models/zendesksell.md b/docs/models/googletasks.md similarity index 63% rename from docs/models/zendesksell.md rename to docs/models/googletasks.md index f2e4f2c1..cc22bd93 100644 --- a/docs/models/zendesksell.md +++ b/docs/models/googletasks.md @@ -1,8 +1,8 @@ -# ZendeskSell +# GoogleTasks ## Values | Name | Value | | -------------- | -------------- | -| `ZENDESK_SELL` | zendesk-sell | \ No newline at end of file +| `GOOGLE_TASKS` | google-tasks | \ No newline at end of file diff --git a/docs/models/groupby.md b/docs/models/groupby.md new file mode 100644 index 00000000..9dfc0802 --- /dev/null +++ b/docs/models/groupby.md @@ -0,0 +1,13 @@ +# GroupBy + +Category term for grouping the search results + + +## Values + +| Name | Value | +| --------- | --------- | +| `NETWORK` | network | +| `PRODUCT` | product | +| `COUNTRY` | country | +| `DATE` | date | \ No newline at end of file diff --git a/docs/models/guru.md b/docs/models/guru.md new file mode 100644 index 00000000..f7fba065 --- /dev/null +++ b/docs/models/guru.md @@ -0,0 +1,8 @@ +# Guru + + +## Values + +| Name | Value | +| ------ | ------ | +| `GURU` | guru | \ No newline at end of file diff --git a/docs/models/testdestinationtype.md b/docs/models/height.md similarity index 57% rename from docs/models/testdestinationtype.md rename to docs/models/height.md index b46e84b0..03aa097d 100644 --- a/docs/models/testdestinationtype.md +++ b/docs/models/height.md @@ -1,8 +1,8 @@ -# TestDestinationType +# Height ## Values | Name | Value | | -------- | -------- | -| `SILENT` | SILENT | \ No newline at end of file +| `HEIGHT` | height | \ No newline at end of file diff --git a/docs/models/inferencetype.md b/docs/models/inferencetype.md deleted file mode 100644 index 258891c2..00000000 --- a/docs/models/inferencetype.md +++ /dev/null @@ -1,11 +0,0 @@ -# InferenceType - -How to infer the types of the columns. 
If none, inference default to strings. - - -## Values - -| Name | Value | -| ---------------------- | ---------------------- | -| `NONE` | None | -| `PRIMITIVE_TYPES_ONLY` | Primitive Types Only | \ No newline at end of file diff --git a/docs/models/issuesstreamexpandwith.md b/docs/models/issuesstreamexpandwith.md deleted file mode 100644 index 00366676..00000000 --- a/docs/models/issuesstreamexpandwith.md +++ /dev/null @@ -1,10 +0,0 @@ -# IssuesStreamExpandWith - - -## Values - -| Name | Value | -| ----------------- | ----------------- | -| `RENDERED_FIELDS` | renderedFields | -| `TRANSITIONS` | transitions | -| `CHANGELOG` | changelog | \ No newline at end of file diff --git a/docs/models/jotform.md b/docs/models/jotform.md new file mode 100644 index 00000000..65c5e4a1 --- /dev/null +++ b/docs/models/jotform.md @@ -0,0 +1,8 @@ +# Jotform + + +## Values + +| Name | Value | +| --------- | --------- | +| `JOTFORM` | jotform | \ No newline at end of file diff --git a/docs/models/jsonl.md b/docs/models/jsonl.md deleted file mode 100644 index 0bdb858d..00000000 --- a/docs/models/jsonl.md +++ /dev/null @@ -1,13 +0,0 @@ -# Jsonl - -This connector uses PyArrow for JSON Lines (jsonl) file parsing. - - -## Fields - -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `block_size` | *Optional[int]* | :heavy_minus_sign: | The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors. | | -| `filetype` | [Optional[models.SourceS3SchemasFormatFiletype]](../models/sources3schemasformatfiletype.md) | :heavy_minus_sign: | N/A | | -| `newlines_in_values` | *Optional[bool]* | :heavy_minus_sign: | Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False. | | -| `unexpected_field_behavior` | [Optional[models.UnexpectedFieldBehavior]](../models/unexpectedfieldbehavior.md) | :heavy_minus_sign: | How JSON fields outside of explicit_schema (if given) are treated. 
Check PyArrow documentation for details | ignore | \ No newline at end of file diff --git a/docs/models/localfilesystemlimited.md b/docs/models/localfilesystemlimited.md new file mode 100644 index 00000000..5cbf0c9f --- /dev/null +++ b/docs/models/localfilesystemlimited.md @@ -0,0 +1,8 @@ +# LocalFilesystemLimited + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `storage` | [models.SourceFileSchemasProviderStorageProvider8Storage](../models/sourcefileschemasproviderstorageprovider8storage.md) | :heavy_check_mark: | WARNING: Note that the local storage URL available for reading must start with the local mount "/local/" at the moment until we implement more advanced docker mounting options. | \ No newline at end of file diff --git a/docs/models/metricsfilter.md b/docs/models/metricsfilter.md index 21b633b9..9a69ebf4 100644 --- a/docs/models/metricsfilter.md +++ b/docs/models/metricsfilter.md @@ -23,9 +23,9 @@ value: models.SourceGoogleAnalyticsDataAPIOrGroup = /* values here */ value: models.SourceGoogleAnalyticsDataAPINotExpression = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPIFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasFilter` ```python -value: models.SourceGoogleAnalyticsDataAPIFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasFilter = /* values here */ ``` diff --git a/docs/models/mockcatalog.md b/docs/models/mockcatalog.md deleted file mode 100644 index a21feb72..00000000 --- a/docs/models/mockcatalog.md +++ /dev/null @@ -1,17 +0,0 @@ -# MockCatalog - - -## Supported Types - -### `models.SingleSchema` - -```python -value: models.SingleSchema = /* values here */ -``` - -### `models.MultiSchema` - -```python -value: models.MultiSchema = /* values here */ -``` - diff --git a/docs/models/multischema.md b/docs/models/multischema.md deleted file mode 100644 index 4f2ac963..00000000 --- a/docs/models/multischema.md +++ /dev/null @@ -1,11 +0,0 @@ -# MultiSchema - -A catalog with multiple data streams, each with a different schema. 
- - -## Fields - -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `stream_schemas` | *Optional[str]* | :heavy_minus_sign: | A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples. | -| `type` | [Optional[models.SourceE2eTestCloudType]](../models/sourcee2etestcloudtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/nativenetworkencryptionnne.md b/docs/models/nativenetworkencryptionnne.md index e08ff481..3e7e6cd7 100644 --- a/docs/models/nativenetworkencryptionnne.md +++ b/docs/models/nativenetworkencryptionnne.md @@ -5,7 +5,7 @@ The native network encryption gives you the ability to encrypt database connecti ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | -| `encryption_algorithm` | [Optional[models.EncryptionAlgorithm]](../models/encryptionalgorithm.md) | :heavy_minus_sign: | This parameter defines what encryption algorithm is used. 
| -| `encryption_method` | [models.EncryptionMethod](../models/encryptionmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | +| `encryption_algorithm` | [Optional[models.EncryptionAlgorithm]](../models/encryptionalgorithm.md) | :heavy_minus_sign: | This parameter defines the database encryption algorithm. | +| `encryption_method` | [Optional[models.DestinationOracleEncryptionMethod]](../models/destinationoracleencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/nonet.md b/docs/models/nonet.md index 587c4105..13e73566 100644 --- a/docs/models/nonet.md +++ b/docs/models/nonet.md @@ -1,10 +1,10 @@ # NoneT -None. +No authentication will be used ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | -| `authorization` | [models.DestinationMongodbSchemasAuthorization](../models/destinationmongodbschemasauthorization.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `method` | [models.DestinationElasticsearchMethod](../models/destinationelasticsearchmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/nylas.md b/docs/models/nylas.md new file mode 100644 index 00000000..fbce3af1 --- /dev/null +++ b/docs/models/nylas.md @@ -0,0 +1,8 @@ +# Nylas + + +## Values + +| Name | Value | +| ------- | ------- | +| `NYLAS` | nylas | \ No newline at end of file diff --git a/docs/models/parquet.md b/docs/models/parquet.md deleted file mode 100644 index 9811e687..00000000 --- a/docs/models/parquet.md +++ /dev/null @@ -1,13 +0,0 @@ -# Parquet - -This connector utilises PyArrow (Apache Arrow) for Parquet parsing. 
- - -## Fields - -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `batch_size` | *Optional[int]* | :heavy_minus_sign: | Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide. | -| `buffer_size` | *Optional[int]* | :heavy_minus_sign: | Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide. | -| `columns` | List[*str*] | :heavy_minus_sign: | If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns. | -| `filetype` | [Optional[models.SourceS3Filetype]](../models/sources3filetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/devnull.md b/docs/models/pgvector.md similarity index 64% rename from docs/models/devnull.md rename to docs/models/pgvector.md index 4b1d8ea4..36a93d23 100644 --- a/docs/models/devnull.md +++ b/docs/models/pgvector.md @@ -1,8 +1,8 @@ -# DevNull +# Pgvector ## Values | Name | Value | | ---------- | ---------- | -| `DEV_NULL` | dev-null | \ No newline at end of file +| `PGVECTOR` | pgvector | \ No newline at end of file diff --git a/docs/models/picqer.md b/docs/models/picqer.md new file mode 100644 index 00000000..b12b1400 --- /dev/null +++ b/docs/models/picqer.md @@ -0,0 +1,8 @@ +# Picqer + + +## Values + +| Name | Value | +| -------- | -------- | +| `PICQER` | picqer | \ No newline at end of file diff --git a/docs/models/piwik.md b/docs/models/piwik.md new file mode 100644 index 00000000..19aae76d --- /dev/null +++ b/docs/models/piwik.md @@ -0,0 +1,8 @@ +# Piwik + + +## Values + +| Name | Value | +| ------- | ------- | +| `PIWIK` | piwik | \ No newline at end of file diff --git a/docs/models/postgresconnection.md b/docs/models/postgresconnection.md new file mode 100644 index 00000000..fe94229a --- /dev/null +++ b/docs/models/postgresconnection.md @@ -0,0 +1,15 @@ +# PostgresConnection + +Postgres can be used to store vector data and retrieve embeddings. 
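A connection sketch using the placeholder values from the field table just below:

```python
from airbyte_api import models  # assumed import path for this SDK

# All values are placeholders; replace them with your own connection details.
connection = models.PostgresConnection(
    credentials=models.DestinationPgvectorCredentials(password="AIRBYTE_PASSWORD"),
    database="AIRBYTE_DATABASE",
    default_schema="AIRBYTE_SCHEMA",
    host="AIRBYTE_ACCOUNT",
    port=5432,
    username="AIRBYTE_USER",
)
```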
+ + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `credentials` | [models.DestinationPgvectorCredentials](../models/destinationpgvectorcredentials.md) | :heavy_check_mark: | N/A | | +| `database` | *str* | :heavy_check_mark: | Enter the name of the database that you want to sync data into | AIRBYTE_DATABASE | +| `default_schema` | *str* | :heavy_check_mark: | Enter the name of the default schema | AIRBYTE_SCHEMA | +| `host` | *str* | :heavy_check_mark: | Enter the account name you want to use to access the database. | AIRBYTE_ACCOUNT | +| `port` | *int* | :heavy_check_mark: | Enter the port you want to use to access the database | 5432 | +| `username` | *str* | :heavy_check_mark: | Enter the name of the user you want to use to access the database | AIRBYTE_USER | \ No newline at end of file diff --git a/docs/models/sourcee2etestcloudtype.md b/docs/models/productboard.md similarity index 58% rename from docs/models/sourcee2etestcloudtype.md rename to docs/models/productboard.md index 60e92ece..c565d881 100644 --- a/docs/models/sourcee2etestcloudtype.md +++ b/docs/models/productboard.md @@ -1,8 +1,8 @@ -# SourceE2eTestCloudType +# Productboard ## Values | Name | Value | | -------------- | -------------- | -| `MULTI_STREAM` | MULTI_STREAM | \ No newline at end of file +| `PRODUCTBOARD` | productboard | \ No newline at end of file diff --git a/docs/models/productive.md b/docs/models/productive.md new file mode 100644 index 00000000..713d1cdf --- /dev/null +++ b/docs/models/productive.md @@ -0,0 +1,8 @@ +# Productive + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `PRODUCTIVE` | productive | \ No newline at end of file diff --git a/docs/models/reportname.md b/docs/models/reportname.md index ef7fd879..a7162f46 100644 --- a/docs/models/reportname.md +++ b/docs/models/reportname.md @@ -41,4 +41,12 @@ | `GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE` | GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE | | `GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL` | GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL | | `GET_XML_BROWSE_TREE_DATA` | GET_XML_BROWSE_TREE_DATA | -| `GET_VENDOR_REAL_TIME_INVENTORY_REPORT` | GET_VENDOR_REAL_TIME_INVENTORY_REPORT | \ No newline at end of file +| `GET_VENDOR_REAL_TIME_INVENTORY_REPORT` | GET_VENDOR_REAL_TIME_INVENTORY_REPORT | +| `GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT` | GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT | +| `GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT` | GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT | +| `GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT` | GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT | +| `GET_SALES_AND_TRAFFIC_REPORT` | GET_SALES_AND_TRAFFIC_REPORT | +| `GET_VENDOR_SALES_REPORT` | GET_VENDOR_SALES_REPORT | +| `GET_VENDOR_INVENTORY_REPORT` | GET_VENDOR_INVENTORY_REPORT | +| `GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT` | GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT | +| `GET_VENDOR_TRAFFIC_REPORT` | GET_VENDOR_TRAFFIC_REPORT | \ No newline at end of file diff --git a/docs/models/s3amazonwebservices.md b/docs/models/s3amazonwebservices.md index 4c00e027..acccf156 100644 --- 
a/docs/models/s3amazonwebservices.md +++ b/docs/models/s3amazonwebservices.md @@ -1,17 +1,10 @@ # S3AmazonWebServices -Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services - ## Fields -| Field | Type | Required | Description | Example | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `aws_access_key_id` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | -| `aws_secret_access_key` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | -| `bucket` | *Optional[str]* | :heavy_minus_sign: | Name of the S3 bucket where the file(s) exist. | | -| `endpoint` | *Optional[str]* | :heavy_minus_sign: | Endpoint to an S3 compatible service. Leave empty to use AWS. | | -| `path_prefix` | *Optional[str]* | :heavy_minus_sign: | By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate. | | -| `region_name` | *Optional[str]* | :heavy_minus_sign: | AWS region where the S3 bucket is located. If not provided, the region will be determined automatically. | | -| `role_arn` | *Optional[str]* | :heavy_minus_sign: | Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. Set the External ID to the Airbyte workspace ID, which can be found in the URL of this page. | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated. 
| 2021-01-01T00:00:00Z | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `aws_access_key_id` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | +| `aws_secret_access_key` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | +| `storage` | [models.SourceFileSchemasStorage](../models/sourcefileschemasstorage.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sevenshifts.md b/docs/models/sevenshifts.md new file mode 100644 index 00000000..7452a09f --- /dev/null +++ b/docs/models/sevenshifts.md @@ -0,0 +1,8 @@ +# Sevenshifts + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `SEVENSHIFTS` | 7shifts | \ No newline at end of file diff --git a/docs/models/shortcut.md b/docs/models/shortcut.md new file mode 100644 index 00000000..185dc33b --- /dev/null +++ b/docs/models/shortcut.md @@ -0,0 +1,8 @@ +# Shortcut + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `SHORTCUT` | shortcut | \ No newline at end of file diff --git a/docs/models/silent.md b/docs/models/silent.md deleted file mode 100644 index 6c70a3fd..00000000 --- a/docs/models/silent.md +++ /dev/null @@ -1,8 +0,0 @@ -# Silent - - -## Fields - -| Field | Type | Required | Description | -| ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | ------------------------------------------------------------------------ | -| `test_destination_type` | [Optional[models.TestDestinationType]](../models/testdestinationtype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/source7shifts.md b/docs/models/source7shifts.md new file mode 100644 index 00000000..1e40827c --- /dev/null +++ b/docs/models/source7shifts.md @@ -0,0 +1,10 @@ +# Source7shifts + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | +| `access_token` | *str* | :heavy_check_mark: | Access token to use for authentication. 
Generate it in the 7shifts Developer Tools. | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Sevenshifts](../models/sevenshifts.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceappcues.md b/docs/models/sourceappcues.md new file mode 100644 index 00000000..a462be72 --- /dev/null +++ b/docs/models/sourceappcues.md @@ -0,0 +1,12 @@ +# SourceAppcues + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | +| `account_id` | *str* | :heavy_check_mark: | Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account) | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `username` | *str* | :heavy_check_mark: | N/A | +| `password` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `source_type` | [models.Appcues](../models/appcues.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceappfigures.md b/docs/models/sourceappfigures.md new file mode 100644 index 00000000..f85ea8c0 --- /dev/null +++ b/docs/models/sourceappfigures.md @@ -0,0 +1,12 @@ +# SourceAppfigures + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `group_by` | [Optional[models.GroupBy]](../models/groupby.md) | :heavy_minus_sign: | Category term for grouping the search results | +| `search_store` | *Optional[str]* | :heavy_minus_sign: | The store which needs to be searched in streams | +| `source_type` | [models.Appfigures](../models/appfigures.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceasana.md b/docs/models/sourceasana.md index 9ac19cae..f59bae67 100644 --- a/docs/models/sourceasana.md +++ b/docs/models/sourceasana.md @@ -3,9 +3,8 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | -| `credentials` | [Optional[models.AuthenticationMechanism]](../models/authenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate to Github | -| `organization_export_ids` | List[*Any*] | :heavy_minus_sign: | Globally 
unique identifiers for the organization exports | -| `source_type` | [Optional[models.SourceAsanaAsana]](../models/sourceasanaasana.md) | :heavy_minus_sign: | N/A | -| `test_mode` | *Optional[bool]* | :heavy_minus_sign: | This flag is used for testing purposes for certain streams that return a lot of data. This flag is not meant to be enabled for prod. | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | -------------------------------------------------------------------------------- | +| `credentials` | [Optional[models.AuthenticationMechanism]](../models/authenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate to Github | +| `organization_export_ids` | List[*Any*] | :heavy_minus_sign: | Globally unique identifiers for the organization exports | +| `source_type` | [Optional[models.SourceAsanaAsana]](../models/sourceasanaasana.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceazureblobstorageschemasstreamsformatformatfiletype.md b/docs/models/sourceazureblobstorageschemasstreamsformatformatfiletype.md deleted file mode 100644 index 7c6f6e4f..00000000 --- a/docs/models/sourceazureblobstorageschemasstreamsformatformatfiletype.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype - - -## Values - -| Name | Value | -| ------ | ------ | -| `AVRO` | avro | \ No newline at end of file diff --git a/docs/models/sourcebitly.md b/docs/models/sourcebitly.md new file mode 100644 index 00000000..8a04f293 --- /dev/null +++ b/docs/models/sourcebitly.md @@ -0,0 +1,11 @@ +# SourceBitly + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Bitly](../models/bitly.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcebrevo.md b/docs/models/sourcebrevo.md new file mode 100644 index 00000000..db1324ff --- /dev/null +++ b/docs/models/sourcebrevo.md @@ -0,0 +1,10 @@ +# SourceBrevo + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Brevo](../models/brevo.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcebuildkite.md b/docs/models/sourcebuildkite.md new file mode 100644 index 00000000..ce636aa2 --- 
/dev/null +++ b/docs/models/sourcebuildkite.md @@ -0,0 +1,10 @@ +# SourceBuildkite + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Buildkite](../models/buildkite.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcebuzzsprout.md b/docs/models/sourcebuzzsprout.md new file mode 100644 index 00000000..d1ce3302 --- /dev/null +++ b/docs/models/sourcebuzzsprout.md @@ -0,0 +1,11 @@ +# SourceBuzzsprout + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `podcast_id` | *str* | :heavy_check_mark: | Podcast ID found in `https://www.buzzsprout.com/my/profile/api` | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Buzzsprout](../models/buzzsprout.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcecanny.md b/docs/models/sourcecanny.md new file mode 100644 index 00000000..830f996d --- /dev/null +++ b/docs/models/sourcecanny.md @@ -0,0 +1,9 @@ +# SourceCanny + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | ------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | You can find your secret API key in Your Canny Subdomain > Settings > API | +| `source_type` | [models.Canny](../models/canny.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcechameleon.md b/docs/models/sourcechameleon.md new file mode 100644 index 00000000..77a8e2ac --- /dev/null +++ b/docs/models/sourcechameleon.md @@ -0,0 +1,13 @@ +# SourceChameleon + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `filter_` | [Optional[models.Filter]](../models/filter_.md) | :heavy_minus_sign: | Filter for using in the `segments_experiences` stream | +| `limit` | *Optional[str]* | :heavy_minus_sign: | Max records per page limit | +| `source_type` | 
[models.Chameleon](../models/chameleon.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcecimis.md b/docs/models/sourcecimis.md new file mode 100644 index 00000000..a03b9d98 --- /dev/null +++ b/docs/models/sourcecimis.md @@ -0,0 +1,16 @@ +# SourceCimis + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `targets` | List[*Any*] | :heavy_check_mark: | N/A | +| `targets_type` | [models.TargetsType](../models/targetstype.md) | :heavy_check_mark: | N/A | +| `daily_data_items` | List[*Any*] | :heavy_minus_sign: | N/A | +| `hourly_data_items` | List[*Any*] | :heavy_minus_sign: | N/A | +| `source_type` | [models.Cimis](../models/cimis.md) | :heavy_check_mark: | N/A | +| `unit_of_measure` | [Optional[models.UnitOfMeasure]](../models/unitofmeasure.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourceclickhouse.md b/docs/models/sourceclickhouse.md index 9b34cc22..1d58b08b 100644 --- a/docs/models/sourceclickhouse.md +++ b/docs/models/sourceclickhouse.md @@ -12,4 +12,5 @@ | `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with this username. | | | `port` | *Optional[int]* | :heavy_minus_sign: | The port of the database. | 8123 | | `source_type` | [models.SourceClickhouseClickhouse](../models/sourceclickhouseclickhouse.md) | :heavy_check_mark: | N/A | | +| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. | | | `tunnel_method` | [Optional[models.SourceClickhouseSSHTunnelMethod]](../models/sourceclickhousesshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. | | \ No newline at end of file diff --git a/docs/models/sourceconfiguration.md b/docs/models/sourceconfiguration.md index b4fe5cb0..6c368c0e 100644 --- a/docs/models/sourceconfiguration.md +++ b/docs/models/sourceconfiguration.md @@ -11,6 +11,12 @@ The values required to configure the source. 
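+# For instance, a hedged sketch of configuring one union member with
+# hypothetical values, assuming the generated constructors take the
+# documented fields as keyword arguments:
+#
+#   import datetime
+#   configuration = models.Source7shifts(
+#       access_token="YOUR_7SHIFTS_ACCESS_TOKEN",   # from the 7shifts Developer Tools
+#       start_date=datetime.date(2024, 1, 1),
+#       source_type=models.Sevenshifts.SEVENSHIFTS,
+#   )
+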
value: models.SourceAha = /* values here */ ``` +### `models.Source7shifts` + +```python +value: models.Source7shifts = /* values here */ +``` + ### `models.SourceAirbyte` ```python @@ -59,6 +65,18 @@ value: models.SourceAmplitude = /* values here */ value: models.SourceApifyDataset = /* values here */ ``` +### `models.SourceAppcues` + +```python +value: models.SourceAppcues = /* values here */ +``` + +### `models.SourceAppfigures` + +```python +value: models.SourceAppfigures = /* values here */ +``` + ### `models.SourceAppfollow` ```python @@ -119,6 +137,12 @@ value: models.SourceBigquery = /* values here */ value: models.SourceBingAds = /* values here */ ``` +### `models.SourceBitly` + +```python +value: models.SourceBitly = /* values here */ +``` + ### `models.SourceBraintree` ```python @@ -137,18 +161,48 @@ value: models.SourceBraze = /* values here */ value: models.SourceBreezyHr = /* values here */ ``` +### `models.SourceBrevo` + +```python +value: models.SourceBrevo = /* values here */ +``` + +### `models.SourceBuildkite` + +```python +value: models.SourceBuildkite = /* values here */ +``` + +### `models.SourceBuzzsprout` + +```python +value: models.SourceBuzzsprout = /* values here */ +``` + ### `models.SourceCalendly` ```python value: models.SourceCalendly = /* values here */ ``` +### `models.SourceCanny` + +```python +value: models.SourceCanny = /* values here */ +``` + ### `models.SourceCart` ```python value: models.SourceCart = /* values here */ ``` +### `models.SourceChameleon` + +```python +value: models.SourceChameleon = /* values here */ +``` + ### `models.SourceChargebee` ```python @@ -161,6 +215,12 @@ value: models.SourceChargebee = /* values here */ value: models.SourceChartmogul = /* values here */ ``` +### `models.SourceCimis` + +```python +value: models.SourceCimis = /* values here */ +``` + ### `models.SourceClazar` ```python @@ -281,12 +341,6 @@ value: models.SourceDremio = /* values here */ value: models.SourceDynamodb = /* values here */ ``` -### `models.SourceE2eTestCloud` - -```python -value: models.SourceE2eTestCloud = /* values here */ -``` - ### `models.SourceEmailoctopus` ```python @@ -299,6 +353,12 @@ value: models.SourceEmailoctopus = /* values here */ value: models.SourceExchangeRates = /* values here */ ``` +### `models.SourceEzofficeinventory` + +```python +value: models.SourceEzofficeinventory = /* values here */ +``` + ### `models.SourceFacebookMarketing` ```python @@ -353,6 +413,12 @@ value: models.SourceFreshdesk = /* values here */ value: models.SourceFreshsales = /* values here */ ``` +### `models.SourceFront` + +```python +value: models.SourceFront = /* values here */ +``` + ### `models.SourceGainsightPx` ```python @@ -443,6 +509,12 @@ value: models.SourceGoogleSearchConsole = /* values here */ value: models.SourceGoogleSheets = /* values here */ ``` +### `models.SourceGoogleTasks` + +```python +value: models.SourceGoogleTasks = /* values here */ +``` + ### `models.SourceGoogleWebfonts` ```python @@ -461,6 +533,12 @@ value: models.SourceGreenhouse = /* values here */ value: models.SourceGridly = /* values here */ ``` +### `models.SourceGuru` + +```python +value: models.SourceGuru = /* values here */ +``` + ### `models.SourceHardcodedRecords` ```python @@ -473,6 +551,12 @@ value: models.SourceHardcodedRecords = /* values here */ value: models.SourceHarvest = /* values here */ ``` +### `models.SourceHeight` + +```python +value: models.SourceHeight = /* values here */ +``` + ### `models.SourceHibob` ```python @@ -539,6 +623,12 @@ value: 
models.SourceIterable = /* values here */ value: models.SourceJira = /* values here */ ``` +### `models.SourceJotform` + +```python +value: models.SourceJotform = /* values here */ +``` + ### `models.SourceK6Cloud` ```python @@ -725,6 +815,12 @@ value: models.SourceNorthpassLms = /* values here */ value: models.SourceNotion = /* values here */ ``` +### `models.SourceNylas` + +```python +value: models.SourceNylas = /* values here */ +``` + ### `models.SourceNytimes` ```python @@ -815,6 +911,12 @@ value: models.SourcePersistiq = /* values here */ value: models.SourcePexelsAPI = /* values here */ ``` +### `models.SourcePicqer` + +```python +value: models.SourcePicqer = /* values here */ +``` + ### `models.SourcePinterest` ```python @@ -827,6 +929,12 @@ value: models.SourcePinterest = /* values here */ value: models.SourcePipedrive = /* values here */ ``` +### `models.SourcePiwik` + +```python +value: models.SourcePiwik = /* values here */ +``` + ### `models.SourcePlanhat` ```python @@ -875,6 +983,18 @@ value: models.SourcePostmarkapp = /* values here */ value: models.SourcePrestashop = /* values here */ ``` +### `models.SourceProductboard` + +```python +value: models.SourceProductboard = /* values here */ +``` + +### `models.SourceProductive` + +```python +value: models.SourceProductive = /* values here */ +``` + ### `models.SourcePypi` ```python @@ -1031,6 +1151,12 @@ value: models.SourceSftpBulk = /* values here */ value: models.SourceShopify = /* values here */ ``` +### `models.SourceShortcut` + +```python +value: models.SourceShortcut = /* values here */ +``` + ### `models.SourceShortio` ```python @@ -1115,6 +1241,18 @@ value: models.SourceSurveySparrow = /* values here */ value: models.SourceSurveymonkey = /* values here */ ``` +### `models.SourceSurvicate` + +```python +value: models.SourceSurvicate = /* values here */ +``` + +### `models.SourceTeamwork` + +```python +value: models.SourceTeamwork = /* values here */ +``` + ### `models.SourceTempo` ```python @@ -1193,6 +1331,12 @@ value: models.SourceVantage = /* values here */ value: models.SourceWebflow = /* values here */ ``` +### `models.SourceWhenIWork` + +```python +value: models.SourceWhenIWork = /* values here */ +``` + ### `models.SourceWhiskyHunter` ```python @@ -1241,12 +1385,6 @@ value: models.SourceYoutubeAnalytics = /* values here */ value: models.SourceZendeskChat = /* values here */ ``` -### `models.SourceZendeskSell` - -```python -value: models.SourceZendeskSell = /* values here */ -``` - ### `models.SourceZendeskSunshine` ```python diff --git a/docs/models/sourcee2etestcloud.md b/docs/models/sourcee2etestcloud.md deleted file mode 100644 index 53104aa4..00000000 --- a/docs/models/sourcee2etestcloud.md +++ /dev/null @@ -1,11 +0,0 @@ -# SourceE2eTestCloud - - -## Supported Types - -### `models.ContinuousFeed` - -```python -value: models.ContinuousFeed = /* values here */ -``` - diff --git a/docs/models/sourcee2etestcloudschemastype.md b/docs/models/sourcee2etestcloudschemastype.md deleted file mode 100644 index 8b24717d..00000000 --- a/docs/models/sourcee2etestcloudschemastype.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourceE2eTestCloudSchemasType - - -## Values - -| Name | Value | -| --------------- | --------------- | -| `SINGLE_STREAM` | SINGLE_STREAM | \ No newline at end of file diff --git a/docs/models/sourceezofficeinventory.md b/docs/models/sourceezofficeinventory.md new file mode 100644 index 00000000..8ea9d420 --- /dev/null +++ b/docs/models/sourceezofficeinventory.md @@ -0,0 +1,11 @@ +# SourceEzofficeinventory + + 
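+A rough construction sketch with placeholder values; field meanings are given in the fields table below, and the `source_type` enum member name is an assumption by analogy with the other source enums:
+
+```python
+import datetime
+from airbyte_api import models
+
+source = models.SourceEzofficeinventory(
+    api_key="YOUR_EZO_ACCESS_TOKEN",        # enable API Access under Settings > Integrations > API Integration
+    start_date=datetime.date(2024, 1, 1),   # earliest date to sync historical streams from
+    subdomain="mycompany",                  # the company name used in signup
+    source_type=models.Ezofficeinventory.EZOFFICEINVENTORY,  # assumed enum member
+)
+```
+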
+## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | Your EZOfficeInventory Access Token. API Access is disabled by default. Enable API Access in Settings > Integrations > API Integration and click on Update to generate a new access token | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | Earliest date you want to sync historical streams (inventory_histories, asset_histories, asset_stock_histories) from | +| `subdomain` | *str* | :heavy_check_mark: | The company name used in signup, also visible in the URL when logged in. | +| `source_type` | [models.Ezofficeinventory](../models/ezofficeinventory.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcefacebookmarketing.md b/docs/models/sourcefacebookmarketing.md index 6d46c869..265973bc 100644 --- a/docs/models/sourcefacebookmarketing.md +++ b/docs/models/sourcefacebookmarketing.md @@ -8,12 +8,9 @@ | `account_ids` | List[*str*] | :heavy_check_mark: | The Facebook Ad account ID(s) to pull data from. The Ad account ID number is in the account dropdown menu or in your browser's address bar of your Meta Ads Manager. See the docs for more information. | 111111111111111 | | `credentials` | [models.SourceFacebookMarketingAuthentication](../models/sourcefacebookmarketingauthentication.md) | :heavy_check_mark: | Credentials for connecting to the Facebook Marketing API | | | `access_token` | *Optional[str]* | :heavy_minus_sign: | The value of the generated access token. From your App’s Dashboard, click on "Marketing API" then "Tools". Select permissions ads_management, ads_read, read_insights, business_management. Then click on "Get token". See the docs for more information. | | -| `action_breakdowns_allow_empty` | *Optional[bool]* | :heavy_minus_sign: | Allows action_breakdowns to be an empty list | | | `ad_statuses` | List[[models.ValidAdStatuses](../models/validadstatuses.md)] | :heavy_minus_sign: | Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out. | | | `adset_statuses` | List[[models.ValidAdSetStatuses](../models/validadsetstatuses.md)] | :heavy_minus_sign: | Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out. | | | `campaign_statuses` | List[[models.ValidCampaignStatuses](../models/validcampaignstatuses.md)] | :heavy_minus_sign: | Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out. 
| | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client Id for your OAuth app | | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret for your OAuth app | | | `custom_insights` | List[[models.InsightConfig](../models/insightconfig.md)] | :heavy_minus_sign: | A list which contains ad statistics entries, each entry must have a name and can contains fields, breakdowns or action_breakdowns. Click on "add" to fill this field. | | | `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date until which you'd like to replicate data for all incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated between the start date and this end date will be replicated. Not setting this option will result in always syncing the latest data. | 2017-01-26T00:00:00Z | | `fetch_thumbnail_images` | *Optional[bool]* | :heavy_minus_sign: | Set to active if you want to fetch the thumbnail_url and store the result in thumbnail_data_url for each Ad Creative. | | diff --git a/docs/models/sourcefiles3amazonwebservices.md b/docs/models/sourcefiles3amazonwebservices.md deleted file mode 100644 index 4a02222a..00000000 --- a/docs/models/sourcefiles3amazonwebservices.md +++ /dev/null @@ -1,10 +0,0 @@ -# SourceFileS3AmazonWebServices - - -## Fields - -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `aws_access_key_id` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | -| `aws_secret_access_key` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | -| `storage` | [models.SourceFileSchemasStorage](../models/sourcefileschemasstorage.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcefileschemasproviderstorageprovider8storage.md b/docs/models/sourcefileschemasproviderstorageprovider8storage.md new file mode 100644 index 00000000..f296de7b --- /dev/null +++ b/docs/models/sourcefileschemasproviderstorageprovider8storage.md @@ -0,0 +1,10 @@ +# SourceFileSchemasProviderStorageProvider8Storage + +WARNING: Note that the local storage URL available for reading must start with the local mount "/local/" at the moment until we implement more advanced docker mounting options. 
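+
+An illustrative sketch of the constraint (the path is hypothetical; the enum member comes from the values table below):
+
+```python
+from airbyte_api import models
+
+storage = models.SourceFileSchemasProviderStorageProvider8Storage.LOCAL
+assert storage.value == "local"
+
+# Readable file URLs must currently sit under the "/local/" mount noted above.
+url = "/local/shared/my_dataset.csv"
+assert url.startswith("/local/")
+```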
+ + +## Values + +| Name | Value | +| ------- | ------- | +| `LOCAL` | local | \ No newline at end of file diff --git a/docs/models/sourcefront.md b/docs/models/sourcefront.md new file mode 100644 index 00000000..b62c6bf2 --- /dev/null +++ b/docs/models/sourcefront.md @@ -0,0 +1,11 @@ +# SourceFront + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `page_limit` | *Optional[str]* | :heavy_minus_sign: | Page limit for the responses | +| `source_type` | [models.Front](../models/front.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegcscsvformat.md b/docs/models/sourcegcscsvformat.md index 59271ec5..0a996ffc 100644 --- a/docs/models/sourcegcscsvformat.md +++ b/docs/models/sourcegcscsvformat.md @@ -13,7 +13,6 @@ | `filetype` | [Optional[models.SourceGcsSchemasFiletype]](../models/sourcegcsschemasfiletype.md) | :heavy_minus_sign: | N/A | | `header_definition` | [Optional[models.SourceGcsCSVHeaderDefinition]](../models/sourcegcscsvheaderdefinition.md) | :heavy_minus_sign: | How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. | | `ignore_errors_on_fields_mismatch` | *Optional[bool]* | :heavy_minus_sign: | Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. | -| `inference_type` | [Optional[models.SourceGcsInferenceType]](../models/sourcegcsinferencetype.md) | :heavy_minus_sign: | How to infer the types of the columns. If none, inference default to strings. | | `null_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. | | `quote_char` | *Optional[str]* | :heavy_minus_sign: | The character used for quoting CSV values. To disallow quoting, make this field blank. | | `skip_rows_after_header` | *Optional[int]* | :heavy_minus_sign: | The number of rows to skip after the header row. | diff --git a/docs/models/sourcegcsfilebasedstreamconfig.md b/docs/models/sourcegcsfilebasedstreamconfig.md index 86b7e334..20a8e7a3 100644 --- a/docs/models/sourcegcsfilebasedstreamconfig.md +++ b/docs/models/sourcegcsfilebasedstreamconfig.md @@ -10,8 +10,6 @@ | `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. | | `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. 
|
| `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. |
-| `legacy_prefix` | *Optional[str]* | :heavy_minus_sign: | The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. |
-| `primary_key` | *Optional[str]* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key. |
| `recent_n_files_to_read_for_schema_discovery` | *Optional[int]* | :heavy_minus_sign: | The number of recent files which will be used to discover the schema for this stream. |
| `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. |
| `validation_policy` | [Optional[models.SourceGcsValidationPolicy]](../models/sourcegcsvalidationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. | \ No newline at end of file
diff --git a/docs/models/sourcegcsinferencetype.md b/docs/models/sourcegcsinferencetype.md
deleted file mode 100644
index 78c9691f..00000000
--- a/docs/models/sourcegcsinferencetype.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# SourceGcsInferenceType
-
-How to infer the types of the columns. If none, inference default to strings.
-
-
-## Values
-
-| Name | Value |
-| ---------------------- | ---------------------- |
-| `NONE` | None |
-| `PRIMITIVE_TYPES_ONLY` | Primitive Types Only | \ No newline at end of file
diff --git a/docs/models/sourcegithub.md b/docs/models/sourcegithub.md
index bba4890c..f03b0a1c 100644
--- a/docs/models/sourcegithub.md
+++ b/docs/models/sourcegithub.md
@@ -8,9 +8,7 @@
| `credentials` | [models.SourceGithubAuthentication](../models/sourcegithubauthentication.md) | :heavy_check_mark: | Choose how to authenticate to GitHub | |
| `repositories` | List[*str*] | :heavy_check_mark: | List of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for a single repository, `airbytehq/*` to get all repositories from an organization and `airbytehq/a*` for matching multiple repositories by pattern. | airbytehq/airbyte |
| `api_url` | *Optional[str]* | :heavy_minus_sign: | Please enter your basic URL from self-hosted GitHub instance or leave it empty to use GitHub. | https://github.com |
-| `branch` | *Optional[str]* | :heavy_minus_sign: | (DEPRCATED) Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled. | airbytehq/airbyte/master airbytehq/airbyte/my-branch |
| `branches` | List[*str*] | :heavy_minus_sign: | List of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled. | airbytehq/airbyte/master |
| `max_waiting_time` | *Optional[int]* | :heavy_minus_sign: | Max Waiting Time for rate limit. Set higher value to wait till rate limits will be reset to continue sync | 10 |
-| `repository` | *Optional[str]* | :heavy_minus_sign: | (DEPRCATED) Space-delimited list of GitHub organizations/repositories, e.g.
`airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories. | airbytehq/airbyte airbytehq/another-repo | | `source_type` | [models.SourceGithubGithub](../models/sourcegithubgithub.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. If the date is not set, all data will be replicated. For the streams which support this configuration, only data generated on or after the start date will be replicated. This field doesn't apply to all streams, see the docs for more info | 2021-03-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcegitlab.md b/docs/models/sourcegitlab.md index 2a6c2b54..9c24fa65 100644 --- a/docs/models/sourcegitlab.md +++ b/docs/models/sourcegitlab.md @@ -7,9 +7,7 @@ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `credentials` | [models.SourceGitlabAuthorizationMethod](../models/sourcegitlabauthorizationmethod.md) | :heavy_check_mark: | N/A | | | `api_url` | *Optional[str]* | :heavy_minus_sign: | Please enter your basic URL from GitLab instance. | gitlab.com | -| `groups` | *Optional[str]* | :heavy_minus_sign: | [DEPRECATED] Space-delimited list of groups. e.g. airbyte.io. | airbyte.io | | `groups_list` | List[*str*] | :heavy_minus_sign: | List of groups. e.g. airbyte.io. | airbyte.io | -| `projects` | *Optional[str]* | :heavy_minus_sign: | [DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab. | airbyte.io/documentation | | `projects_list` | List[*str*] | :heavy_minus_sign: | Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab. | airbyte.io/documentation | | `source_type` | [models.SourceGitlabGitlab](../models/sourcegitlabgitlab.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you'd like to replicate data for GitLab API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data will be replicated. All data generated after this date will be replicated. 
| 2021-03-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapiexpression.md b/docs/models/sourcegoogleanalyticsdataapiexpression.md index 454ba521..caedebaf 100644 --- a/docs/models/sourcegoogleanalyticsdataapiexpression.md +++ b/docs/models/sourcegoogleanalyticsdataapiexpression.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `field_name` | *str* | :heavy_check_mark: | N/A | -| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `field_name` | *str* | :heavy_check_mark: | N/A | +| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapifilter.md b/docs/models/sourcegoogleanalyticsdataapifilter.md index 2e7acfd4..7c5d0009 100644 --- a/docs/models/sourcegoogleanalyticsdataapifilter.md +++ b/docs/models/sourcegoogleanalyticsdataapifilter.md @@ -1,12 +1,12 @@ # SourceGoogleAnalyticsDataAPIFilter -A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all metrics. +A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all dimensions. 
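+
+An illustrative sketch of one such filter; the concrete member model and its `value` field are assumptions based on the supported filter types this page links to:
+
+```python
+from airbyte_api import models
+
+# A primitive filter on a single dimension field.
+dimension_filter = models.SourceGoogleAnalyticsDataAPIFilter(
+    field_name="city",
+    filter_=models.StringFilter(value="Toronto"),  # `value` kwarg is assumed
+)
+```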
## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `field_name` | *str* | :heavy_check_mark: | N/A | -| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md) | :heavy_check_mark: | N/A | -| `filter_type` | [Optional[models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType]](../models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter4filtertype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `field_name` | *str* | :heavy_check_mark: | N/A | +| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md) | :heavy_check_mark: | N/A | +| `filter_type` | [Optional[models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType]](../models/sourcegoogleanalyticsdataapischemascustomreportsarrayfiltertype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3filter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3filter.md new file mode 100644 index 00000000..164439a0 --- /dev/null +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3filter.md @@ -0,0 +1,29 @@ +# SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter + + +## Supported Types + +### `models.SourceGoogleAnalyticsDataAPISchemasStringFilter` + +```python +value: models.SourceGoogleAnalyticsDataAPISchemasStringFilter = /* values here */ +``` + +### `models.SourceGoogleAnalyticsDataAPISchemasInListFilter` + +```python +value: models.SourceGoogleAnalyticsDataAPISchemasInListFilter = /* values here */ +``` + +### `models.SourceGoogleAnalyticsDataAPISchemasNumericFilter` + 
+```python +value: models.SourceGoogleAnalyticsDataAPISchemasNumericFilter = /* values here */ +``` + +### `models.SourceGoogleAnalyticsDataAPISchemasBetweenFilter` + +```python +value: models.SourceGoogleAnalyticsDataAPISchemasBetweenFilter = /* values here */ +``` + diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md index a05ff8ac..349f1621 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### `models.SourceGoogleAnalyticsDataAPISchemasStringFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasInListFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasNumericFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasBetweenFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md index df7903c1..8453b1b8 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter = /* values here */ ``` -### 
`models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayexpression.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayexpression.md index 56d6fc6e..a0de6300 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayexpression.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayexpression.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `field_name` | *str* | :heavy_check_mark: | N/A | -| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `field_name` | *str* | :heavy_check_mark: | N/A | +| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1filter.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md index 
354bc3c7..98057477 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarrayfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### `models.SourceGoogleAnalyticsDataAPIStringFilter` +### `models.StringFilter` ```python -value: models.SourceGoogleAnalyticsDataAPIStringFilter = /* values here */ +value: models.StringFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPIInListFilter` +### `models.InListFilter` ```python -value: models.SourceGoogleAnalyticsDataAPIInListFilter = /* values here */ +value: models.InListFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPINumericFilter` +### `models.NumericFilter` ```python -value: models.SourceGoogleAnalyticsDataAPINumericFilter = /* values here */ +value: models.NumericFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPIBetweenFilter` +### `models.BetweenFilter` ```python -value: models.SourceGoogleAnalyticsDataAPIBetweenFilter = /* values here */ +value: models.BetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md index 5fd492ea..7131a8a2 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md @@ -3,27 +3,27 @@ ## Supported Types -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter` +### `models.SourceGoogleAnalyticsDataAPIStringFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIStringFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter` +### `models.SourceGoogleAnalyticsDataAPIInListFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIInListFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter` +### `models.SourceGoogleAnalyticsDataAPINumericFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPINumericFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter` +### `models.SourceGoogleAnalyticsDataAPIBetweenFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPIBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1filter.md similarity index 60% rename from docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md rename to docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1filter.md index e64b1101..87b4513f 100644 --- 
a/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter1filter.md +++ b/docs/models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter1filter.md @@ -1,29 +1,29 @@ -# SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter +# SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter ## Supported Types -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter = /* values here */ ``` -### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter` +### `models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter` ```python -value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter = /* values here */ +value: models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter = /* values here */ ``` diff --git a/docs/models/sourcegoogleanalyticsdataapischemasexpression.md b/docs/models/sourcegoogleanalyticsdataapischemasexpression.md index 8d8b6742..55beb8e0 100644 --- a/docs/models/sourcegoogleanalyticsdataapischemasexpression.md +++ b/docs/models/sourcegoogleanalyticsdataapischemasexpression.md @@ -3,7 +3,7 @@ ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| `field_name` | *str* | :heavy_check_mark: | N/A | -| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilterfilter.md) | :heavy_check_mark: | N/A | \ No 
newline at end of file
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `field_name` | *str* | :heavy_check_mark: | N/A |
+| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraydimensionfilterdimensionsfilter3filter.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcegoogleanalyticsdataapischemasfilter.md b/docs/models/sourcegoogleanalyticsdataapischemasfilter.md
index 3f18bfbf..2ad28f5d 100644
--- a/docs/models/sourcegoogleanalyticsdataapischemasfilter.md
+++ b/docs/models/sourcegoogleanalyticsdataapischemasfilter.md
@@ -1,29 +1,12 @@
 # SourceGoogleAnalyticsDataAPISchemasFilter
+A primitive filter. In the same FilterExpression, all of the filter's field names need to be either all dimensions or all metrics.
-## Supported Types
-### `models.StringFilter`
-
-```python
-value: models.StringFilter = /* values here */
-```
-
-### `models.InListFilter`
-
-```python
-value: models.InListFilter = /* values here */
-```
-
-### `models.NumericFilter`
-
-```python
-value: models.NumericFilter = /* values here */
-```
-
-### `models.BetweenFilter`
-
-```python
-value: models.BetweenFilter = /* values here */
-```
+## Fields
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `field_name` | *str* | :heavy_check_mark: | N/A |
+| `filter_` | [models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter](../models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfilterfilter.md) | :heavy_check_mark: | N/A |
+| `filter_type` | [Optional[models.SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType]](../models/sourcegoogleanalyticsdataapischemascustomreportsarraymetricfiltermetricsfilter4filtertype.md) | :heavy_minus_sign: | N/A | \ No
newline at end of file diff --git a/docs/models/sourcegoogledrivefilebasedstreamconfig.md b/docs/models/sourcegoogledrivefilebasedstreamconfig.md index 40805692..8cc7d88f 100644 --- a/docs/models/sourcegoogledrivefilebasedstreamconfig.md +++ b/docs/models/sourcegoogledrivefilebasedstreamconfig.md @@ -10,6 +10,5 @@ | `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. | | `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. | | `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. | -| `primary_key` | *Optional[str]* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key. | | `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. | | `validation_policy` | [Optional[models.SourceGoogleDriveValidationPolicy]](../models/sourcegoogledrivevalidationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. | \ No newline at end of file diff --git a/docs/models/sourcegooglesearchconsole.md b/docs/models/sourcegooglesearchconsole.md index cd3e6a3a..935a9a48 100644 --- a/docs/models/sourcegooglesearchconsole.md +++ b/docs/models/sourcegooglesearchconsole.md @@ -7,7 +7,6 @@ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `authorization` | [models.AuthenticationType](../models/authenticationtype.md) | :heavy_check_mark: | N/A | | | `site_urls` | List[*str*] | :heavy_check_mark: | The URLs of the website property attached to your GSC account. Learn more about properties here. | https://example1.com/ | -| `custom_reports` | *Optional[str]* | :heavy_minus_sign: | (DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports. | | | `custom_reports_array` | List[[models.SourceGoogleSearchConsoleCustomReportConfig](../models/sourcegooglesearchconsolecustomreportconfig.md)] | :heavy_minus_sign: | You can add your Custom Analytics report by creating one. | | | `data_state` | [Optional[models.DataFreshness]](../models/datafreshness.md) | :heavy_minus_sign: | If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. | final | | `end_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_minus_sign: | UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. 
| 2021-12-12 |
diff --git a/docs/models/sourcegoogletasks.md b/docs/models/sourcegoogletasks.md
new file mode 100644
index 00000000..68757afa
--- /dev/null
+++ b/docs/models/sourcegoogletasks.md
@@ -0,0 +1,11 @@
+# SourceGoogleTasks
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `api_key` | *str* | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `records_limit` | *Optional[str]* | :heavy_minus_sign: | The maximum number of records to be returned per request |
+| `source_type` | [models.GoogleTasks](../models/googletasks.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceguru.md b/docs/models/sourceguru.md
new file mode 100644
index 00000000..d784d93e
--- /dev/null
+++ b/docs/models/sourceguru.md
@@ -0,0 +1,13 @@
+# SourceGuru
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `username` | *str* | :heavy_check_mark: | N/A |
+| `password` | *Optional[str]* | :heavy_minus_sign: | N/A |
+| `search_cards_query` | *Optional[str]* | :heavy_minus_sign: | Query for searching cards |
+| `source_type` | [models.Guru](../models/guru.md) | :heavy_check_mark: | N/A |
+| `team_id` | *Optional[str]* | :heavy_minus_sign: | Team ID received in the response of the /teams stream; make sure you have access to the team |
\ No newline at end of file
diff --git a/docs/models/sourceharvest.md b/docs/models/sourceharvest.md
index 90b7f74e..482e2e43 100644
--- a/docs/models/sourceharvest.md
+++ b/docs/models/sourceharvest.md
@@ -8,5 +8,4 @@
 | `account_id` | *str* | :heavy_check_mark: | Harvest account ID. Required for all Harvest requests in pair with Personal Access Token | |
 | `replication_start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. | 2017-01-25T00:00:00Z |
 | `credentials` | [Optional[models.SourceHarvestAuthenticationMechanism]](../models/sourceharvestauthenticationmechanism.md) | :heavy_minus_sign: | Choose how to authenticate to Harvest. | |
-| `replication_end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.
| 2017-01-25T00:00:00Z | | `source_type` | [models.Harvest](../models/harvest.md) | :heavy_check_mark: | N/A | | \ No newline at end of file diff --git a/docs/models/sourceheight.md b/docs/models/sourceheight.md new file mode 100644 index 00000000..4b2a63b6 --- /dev/null +++ b/docs/models/sourceheight.md @@ -0,0 +1,11 @@ +# SourceHeight + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `search_query` | *Optional[str]* | :heavy_minus_sign: | Search query to be used with search stream | +| `source_type` | [models.Height](../models/height.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceinstagram.md b/docs/models/sourceinstagram.md index 6de538c7..94c89707 100644 --- a/docs/models/sourceinstagram.md +++ b/docs/models/sourceinstagram.md @@ -6,7 +6,5 @@ | Field | Type | Required | Description | Example | | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `access_token` | *str* | :heavy_check_mark: | The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. 
See the docs for more information | | -| `client_id` | *Optional[str]* | :heavy_minus_sign: | The Client ID for your Oauth application | | -| `client_secret` | *Optional[str]* | :heavy_minus_sign: | The Client Secret for your Oauth application | | | `source_type` | [models.SourceInstagramInstagram](../models/sourceinstagraminstagram.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date. | 2017-01-25T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcejira.md b/docs/models/sourcejira.md index 9492c0d8..96098bba 100644 --- a/docs/models/sourcejira.md +++ b/docs/models/sourcejira.md @@ -9,11 +9,7 @@ | `domain` | *str* | :heavy_check_mark: | The Domain for your Jira account, e.g. airbyteio.atlassian.net, airbyteio.jira.com, jira.your-domain.com | .atlassian.net | | `email` | *str* | :heavy_check_mark: | The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth. | | | `enable_experimental_streams` | *Optional[bool]* | :heavy_minus_sign: | Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info. | | -| `expand_issue_changelog` | *Optional[bool]* | :heavy_minus_sign: | (DEPRECATED) Expand the changelog when replicating issues. | | -| `expand_issue_transition` | *Optional[bool]* | :heavy_minus_sign: | (DEPRECATED) Expand the transitions when replicating issues. | | -| `issues_stream_expand_with` | List[[models.IssuesStreamExpandWith](../models/issuesstreamexpandwith.md)] | :heavy_minus_sign: | Select fields to Expand the `Issues` stream when replicating with: | | | `lookback_window_minutes` | *Optional[int]* | :heavy_minus_sign: | When set to N, the connector will always refresh resources created within the past N minutes. By default, updated objects that are not newly created are not incrementally synced. | 60 | | `projects` | List[*str*] | :heavy_minus_sign: | List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects. | PROJ1 | -| `render_fields` | *Optional[bool]* | :heavy_minus_sign: | (DEPRECATED) Render issue fields in HTML format in addition to Jira JSON-like format. | | | `source_type` | [models.Jira](../models/jira.md) | :heavy_check_mark: | N/A | | | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation. 
| 2021-03-01T00:00:00Z | \ No newline at end of file diff --git a/docs/models/sourcejotform.md b/docs/models/sourcejotform.md new file mode 100644 index 00000000..f6c2c023 --- /dev/null +++ b/docs/models/sourcejotform.md @@ -0,0 +1,12 @@ +# SourceJotform + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | +| `api_endpoint` | [models.APIEndpoint](../models/apiendpoint.md) | :heavy_check_mark: | N/A | +| `api_key` | *str* | :heavy_check_mark: | N/A | +| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A | +| `source_type` | [models.Jotform](../models/jotform.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcejotformapiendpoint.md b/docs/models/sourcejotformapiendpoint.md new file mode 100644 index 00000000..b0f9f3b6 --- /dev/null +++ b/docs/models/sourcejotformapiendpoint.md @@ -0,0 +1,8 @@ +# SourceJotformAPIEndpoint + + +## Values + +| Name | Value | +| ------- | ------- | +| `BASIC` | basic | \ No newline at end of file diff --git a/docs/models/sourcejotformschemasapiendpoint.md b/docs/models/sourcejotformschemasapiendpoint.md new file mode 100644 index 00000000..ec704719 --- /dev/null +++ b/docs/models/sourcejotformschemasapiendpoint.md @@ -0,0 +1,8 @@ +# SourceJotformSchemasAPIEndpoint + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `ENTERPRISE` | enterprise | \ No newline at end of file diff --git a/docs/models/sourcekyve.md b/docs/models/sourcekyve.md index bd2aae2e..9dfc7943 100644 --- a/docs/models/sourcekyve.md +++ b/docs/models/sourcekyve.md @@ -7,7 +7,5 @@ | ----------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | | `pool_ids` | *str* | :heavy_check_mark: | The IDs of the KYVE storage pool you want to archive. (Comma separated) | 0 | | `start_ids` | *str* | :heavy_check_mark: | The start-id defines, from which bundle id the pipeline should start to extract the data. (Comma separated) | 0 | -| `max_pages` | *Optional[int]* | :heavy_minus_sign: | The maximum amount of pages to go trough. Set to 'null' for all pages. | | -| `page_size` | *Optional[int]* | :heavy_minus_sign: | The pagesize for pagination, smaller numbers are used in integration tests. | | | `source_type` | [models.Kyve](../models/kyve.md) | :heavy_check_mark: | N/A | | | `url_base` | *Optional[str]* | :heavy_minus_sign: | URL to the KYVE Chain API. 
| https://api.kaon.kyve.network/ |
\ No newline at end of file
diff --git a/docs/models/sourcelinkedinads.md b/docs/models/sourcelinkedinads.md
index b7ec7eee..ceaafb86 100644
--- a/docs/models/sourcelinkedinads.md
+++ b/docs/models/sourcelinkedinads.md
@@ -9,4 +9,5 @@
 | `account_ids` | List[*int*] | :heavy_minus_sign: | Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs. | 123456789 |
 | `ad_analytics_reports` | List[[models.AdAnalyticsReportConfiguration](../models/adanalyticsreportconfiguration.md)] | :heavy_minus_sign: | N/A | |
 | `credentials` | [Optional[models.SourceLinkedinAdsAuthentication]](../models/sourcelinkedinadsauthentication.md) | :heavy_minus_sign: | N/A | |
+| `lookback_window` | *Optional[int]* | :heavy_minus_sign: | How far into the past to look for records, in days. | |
 | `source_type` | [models.SourceLinkedinAdsLinkedinAds](../models/sourcelinkedinadslinkedinads.md) | :heavy_check_mark: | N/A | |
\ No newline at end of file
diff --git a/docs/models/sourcemailchimp.md b/docs/models/sourcemailchimp.md
index f7587e10..66e8334e 100644
--- a/docs/models/sourcemailchimp.md
+++ b/docs/models/sourcemailchimp.md
@@ -6,6 +6,5 @@
 | Field | Type | Required | Description | Example |
 | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
 | `credentials` | [Optional[models.SourceMailchimpAuthentication]](../models/sourcemailchimpauthentication.md) | :heavy_minus_sign: | N/A | |
-| `data_center` | *Optional[str]* | :heavy_minus_sign: | Technical fields used to identify datacenter to send request to | |
 | `source_type` | [models.SourceMailchimpMailchimp](../models/sourcemailchimpmailchimp.md) | :heavy_check_mark: | N/A | |
 | `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The date from which you want to start syncing data for Incremental streams. Only records that have been created or modified since this date will be synced. If left blank, all data will be synced.
| 2020-01-01T00:00:00.000Z |
\ No newline at end of file
diff --git a/docs/models/sourcemicrosoftonedrivefilebasedstreamconfig.md b/docs/models/sourcemicrosoftonedrivefilebasedstreamconfig.md
index 3a0df6b9..c8a8eede 100644
--- a/docs/models/sourcemicrosoftonedrivefilebasedstreamconfig.md
+++ b/docs/models/sourcemicrosoftonedrivefilebasedstreamconfig.md
@@ -10,6 +10,5 @@
 | `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. |
 | `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. |
 | `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. |
-| `primary_key` | *Optional[str]* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key. |
 | `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. |
 | `validation_policy` | [Optional[models.SourceMicrosoftOnedriveValidationPolicy]](../models/sourcemicrosoftonedrivevalidationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. |
\ No newline at end of file
diff --git a/docs/models/sourcemicrosoftsharepointfilebasedstreamconfig.md b/docs/models/sourcemicrosoftsharepointfilebasedstreamconfig.md
index 85adedb6..82d0a679 100644
--- a/docs/models/sourcemicrosoftsharepointfilebasedstreamconfig.md
+++ b/docs/models/sourcemicrosoftsharepointfilebasedstreamconfig.md
@@ -10,7 +10,6 @@
 | `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. |
 | `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. |
 | `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. |
-| `primary_key` | *Optional[str]* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key. |
 | `recent_n_files_to_read_for_schema_discovery` | *Optional[int]* | :heavy_minus_sign: | The number of recent files which will be used to discover the schema for this stream. |
 | `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. |
 | `validation_policy` | [Optional[models.SourceMicrosoftSharepointValidationPolicy]](../models/sourcemicrosoftsharepointvalidationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.
| \ No newline at end of file diff --git a/docs/models/sourcemssqlsslmethod.md b/docs/models/sourcemssqlsslmethod.md index d8166280..d397fdc7 100644 --- a/docs/models/sourcemssqlsslmethod.md +++ b/docs/models/sourcemssqlsslmethod.md @@ -5,10 +5,10 @@ The encryption method which is used when communicating with the database. ## Supported Types -### `models.Unencrypted` +### `models.SourceMssqlUnencrypted` ```python -value: models.Unencrypted = /* values here */ +value: models.SourceMssqlUnencrypted = /* values here */ ``` ### `models.SourceMssqlEncryptedTrustServerCertificate` diff --git a/docs/models/sourcemssqlunencrypted.md b/docs/models/sourcemssqlunencrypted.md new file mode 100644 index 00000000..bbd49faa --- /dev/null +++ b/docs/models/sourcemssqlunencrypted.md @@ -0,0 +1,10 @@ +# SourceMssqlUnencrypted + +Data transfer will not be encrypted. + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | +| `ssl_method` | [models.SourceMssqlSchemasSslMethod](../models/sourcemssqlschemassslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourcemysql.md b/docs/models/sourcemysql.md index 0d58482a..f4fdfbd3 100644 --- a/docs/models/sourcemysql.md +++ b/docs/models/sourcemysql.md @@ -13,5 +13,6 @@ | `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. | | | `port` | *Optional[int]* | :heavy_minus_sign: | The port to connect to. | 3306 | | `source_type` | [models.SourceMysqlMysql](../models/sourcemysqlmysql.md) | :heavy_check_mark: | N/A | | +| `ssl` | *Optional[bool]* | :heavy_minus_sign: | Encrypt data using SSL. | | | `ssl_mode` | [Optional[models.SourceMysqlSSLModes]](../models/sourcemysqlsslmodes.md) | :heavy_minus_sign: | SSL connection modes. Read more in the docs. | | | `tunnel_method` | [Optional[models.SourceMysqlSSHTunnelMethod]](../models/sourcemysqlsshtunnelmethod.md) | :heavy_minus_sign: | Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. 
| |
\ No newline at end of file
diff --git a/docs/models/sourcenylas.md b/docs/models/sourcenylas.md
new file mode 100644
index 00000000..2715b46d
--- /dev/null
+++ b/docs/models/sourcenylas.md
@@ -0,0 +1,12 @@
+# SourceNylas
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `api_key` | *str* | :heavy_check_mark: | N/A |
+| `api_server` | [models.APIServer](../models/apiserver.md) | :heavy_check_mark: | N/A |
+| `end_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `source_type` | [models.Nylas](../models/nylas.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceoracle.md b/docs/models/sourceoracle.md
index 28458dff..1f5f81ac 100644
--- a/docs/models/sourceoracle.md
+++ b/docs/models/sourceoracle.md
@@ -5,10 +5,10 @@
 | Field | Type | Required | Description |
 | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
-| `encryption` | [models.Encryption](../models/encryption.md) | :heavy_check_mark: | The encryption method with is used when communicating with the database. |
 | `host` | *str* | :heavy_check_mark: | Hostname of the database. |
 | `username` | *str* | :heavy_check_mark: | The username which is used to access the database. |
 | `connection_data` | [Optional[models.ConnectBy]](../models/connectby.md) | :heavy_minus_sign: | Connect data that will be used for DB connection |
+| `encryption` | [Optional[models.SourceOracleEncryption]](../models/sourceoracleencryption.md) | :heavy_minus_sign: | The encryption method which is used when communicating with the database. |
 | `jdbc_url_params` | *Optional[str]* | :heavy_minus_sign: | Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). |
 | `password` | *Optional[str]* | :heavy_minus_sign: | The password associated with the username. |
 | `port` | *Optional[int]* | :heavy_minus_sign: | Port of the database.
Oracle Corporation recommends the following port numbers:
1521 - Default listening port for client connections to the listener.
2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL |
diff --git a/docs/models/sourceoracleencryption.md b/docs/models/sourceoracleencryption.md
new file mode 100644
index 00000000..aebd4fac
--- /dev/null
+++ b/docs/models/sourceoracleencryption.md
@@ -0,0 +1,25 @@
+# SourceOracleEncryption
+
+The encryption method which is used when communicating with the database.
+
+
+## Supported Types
+
+### `models.SourceOracleUnencrypted`
+
+```python
+value: models.SourceOracleUnencrypted = /* values here */
+```
+
+### `models.SourceOracleNativeNetworkEncryptionNNE`
+
+```python
+value: models.SourceOracleNativeNetworkEncryptionNNE = /* values here */
+```
+
+### `models.SourceOracleTLSEncryptedVerifyCertificate`
+
+```python
+value: models.SourceOracleTLSEncryptedVerifyCertificate = /* values here */
+```
+
diff --git a/docs/models/sourceoracleencryptionalgorithm.md b/docs/models/sourceoracleencryptionalgorithm.md
new file mode 100644
index 00000000..67b23fea
--- /dev/null
+++ b/docs/models/sourceoracleencryptionalgorithm.md
@@ -0,0 +1,12 @@
+# SourceOracleEncryptionAlgorithm
+
+This parameter defines what encryption algorithm is used.
+
+
+## Values
+
+| Name | Value |
+| -------------- | -------------- |
+| `AES256` | AES256 |
+| `RC4_56` | RC4_56 |
+| `THREE_DES168` | 3DES168 |
\ No newline at end of file
diff --git a/docs/models/sourceoracleencryptionmethod.md b/docs/models/sourceoracleencryptionmethod.md
index 70bc1cc5..5f8a9baa 100644
--- a/docs/models/sourceoracleencryptionmethod.md
+++ b/docs/models/sourceoracleencryptionmethod.md
@@ -3,6 +3,6 @@
 ## Values
-| Name | Value |
-| ------------------------------ | ------------------------------ |
-| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate |
\ No newline at end of file
+| Name | Value |
+| ------------- | ------------- |
+| `UNENCRYPTED` | unencrypted |
\ No newline at end of file
diff --git a/docs/models/sourceoraclenativenetworkencryptionnne.md b/docs/models/sourceoraclenativenetworkencryptionnne.md
new file mode 100644
index 00000000..9f3866a8
--- /dev/null
+++ b/docs/models/sourceoraclenativenetworkencryptionnne.md
@@ -0,0 +1,11 @@
+# SourceOracleNativeNetworkEncryptionNNE
+
+The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `encryption_algorithm` | [Optional[models.SourceOracleEncryptionAlgorithm]](../models/sourceoracleencryptionalgorithm.md) | :heavy_minus_sign: | This parameter defines what encryption algorithm is used.
| +| `encryption_method` | [models.SourceOracleSchemasEncryptionMethod](../models/sourceoracleschemasencryptionmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/sourceoracleschemasencryptionencryptionmethod.md b/docs/models/sourceoracleschemasencryptionencryptionmethod.md new file mode 100644 index 00000000..0cdf3428 --- /dev/null +++ b/docs/models/sourceoracleschemasencryptionencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceOracleSchemasEncryptionEncryptionMethod + + +## Values + +| Name | Value | +| ------------------------------ | ------------------------------ | +| `ENCRYPTED_VERIFY_CERTIFICATE` | encrypted_verify_certificate | \ No newline at end of file diff --git a/docs/models/sourceoracleschemasencryptionmethod.md b/docs/models/sourceoracleschemasencryptionmethod.md new file mode 100644 index 00000000..adda5916 --- /dev/null +++ b/docs/models/sourceoracleschemasencryptionmethod.md @@ -0,0 +1,8 @@ +# SourceOracleSchemasEncryptionMethod + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `CLIENT_NNE` | client_nne | \ No newline at end of file diff --git a/docs/models/sourceoracletlsencryptedverifycertificate.md b/docs/models/sourceoracletlsencryptedverifycertificate.md new file mode 100644 index 00000000..3265445b --- /dev/null +++ b/docs/models/sourceoracletlsencryptedverifycertificate.md @@ -0,0 +1,11 @@ +# SourceOracleTLSEncryptedVerifyCertificate + +Verify and use the certificate provided by the server. + + +## Fields + +| Field | Type | Required | Description | +| ----------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | +| `ssl_certificate` | *str* | :heavy_check_mark: | Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations. | +| `encryption_method` | [models.SourceOracleSchemasEncryptionEncryptionMethod](../models/sourceoracleschemasencryptionencryptionmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/avro.md b/docs/models/sourceoracleunencrypted.md similarity index 71% rename from docs/models/avro.md rename to docs/models/sourceoracleunencrypted.md index 0b2b2104..f9ff1a6d 100644 --- a/docs/models/avro.md +++ b/docs/models/sourceoracleunencrypted.md @@ -1,10 +1,10 @@ -# Avro +# SourceOracleUnencrypted -This connector utilises fastavro for Avro parsing. +Data transfer will not be encrypted. 
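Editor's aside: the three Oracle encryption models above are the members of the `SourceOracleEncryption` union that `SourceOracle.encryption` now accepts. A minimal sketch of selecting one member follows; it assumes the generated SDK is importable as `airbyte_api` and that the constructors take the snake_case fields from these tables — the host and credential values are placeholders, not working configuration.

```python
from airbyte_api import models  # assumed import path for the generated SDK

# One member of the SourceOracleEncryption union: native network encryption
# with an explicit algorithm. CLIENT_NNE and AES256 are the enum members
# documented in the value tables above.
encryption = models.SourceOracleNativeNetworkEncryptionNNE(
    encryption_method=models.SourceOracleSchemasEncryptionMethod.CLIENT_NNE,
    encryption_algorithm=models.SourceOracleEncryptionAlgorithm.AES256,
)

# `encryption` is optional on SourceOracle after this change; every value
# below is a placeholder.
source = models.SourceOracle(
    host="oracle.example.internal",
    username="airbyte",
    password="********",
    port=1521,
    encryption=encryption,
)
```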
 ## Fields

 | Field | Type | Required | Description |
 | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
-| `filetype` | [Optional[models.SourceS3SchemasFiletype]](../models/sources3schemasfiletype.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
+| `encryption_method` | [models.SourceOracleEncryptionMethod](../models/sourceoracleencryptionmethod.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcepicqer.md b/docs/models/sourcepicqer.md
new file mode 100644
index 00000000..284dfa45
--- /dev/null
+++ b/docs/models/sourcepicqer.md
@@ -0,0 +1,12 @@
+# SourcePicqer
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `organization_name` | *str* | :heavy_check_mark: | The organization name which is used to log in to Picqer |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `username` | *str* | :heavy_check_mark: | N/A |
+| `password` | *Optional[str]* | :heavy_minus_sign: | N/A |
+| `source_type` | [models.Picqer](../models/picqer.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcepiwik.md b/docs/models/sourcepiwik.md
new file mode 100644
index 00000000..345fa418
--- /dev/null
+++ b/docs/models/sourcepiwik.md
@@ -0,0 +1,11 @@
+# SourcePiwik
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `client_id` | *str* | :heavy_check_mark: | N/A |
+| `client_secret` | *str* | :heavy_check_mark: | N/A |
+| `organization_id` | *str* | :heavy_check_mark: | The organization ID that appears in the URL of your Piwik website |
+| `source_type` | [models.Piwik](../models/piwik.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceproductboard.md b/docs/models/sourceproductboard.md
new file mode 100644
index 00000000..54663e23
--- /dev/null
+++ b/docs/models/sourceproductboard.md
@@ -0,0 +1,10 @@
+# SourceProductboard
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `access_token` | *str* | :heavy_check_mark: | Your Productboard access token. See https://developer.productboard.com/reference/authentication for steps to generate one.
|
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `source_type` | [models.Productboard](../models/productboard.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceproductive.md b/docs/models/sourceproductive.md
new file mode 100644
index 00000000..b826d34c
--- /dev/null
+++ b/docs/models/sourceproductive.md
@@ -0,0 +1,10 @@
+# SourceProductive
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- |
+| `api_key` | *str* | :heavy_check_mark: | N/A |
+| `organization_id` | *str* | :heavy_check_mark: | The organization ID, which can be found on the `https://app.productive.io/xxxx-xxxx/settings/api-integrations` page |
+| `source_type` | [models.Productive](../models/productive.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sources3.md b/docs/models/sources3.md
index b2e5e05a..60a25d70 100644
--- a/docs/models/sources3.md
+++ b/docs/models/sources3.md
@@ -6,19 +6,14 @@ because it is responsible for converting legacy S3 v3 configs into v4 configs us
 ## Fields
-| Field | Type | Required | Description | Example |
-| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `bucket` | *str* | :heavy_check_mark: | Name of the S3 bucket where the file(s) exist. | | -| `streams` | List[[models.SourceS3FileBasedStreamConfig](../models/sources3filebasedstreamconfig.md)] | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | | -| `aws_access_key_id` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | -| `aws_secret_access_key` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | | -| `dataset` | *Optional[str]* | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. | | -| `endpoint` | *Optional[str]* | :heavy_minus_sign: | Endpoint to an S3 compatible service. Leave empty to use AWS. The custom endpoint must be secure, but the 'https' prefix is not required. | my-s3-endpoint.com | -| `format` | [Optional[models.SourceS3FileFormat]](../models/sources3fileformat.md) | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate | | -| `path_pattern` | *Optional[str]* | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use \| to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. | ** | -| `provider` | [Optional[models.S3AmazonWebServices]](../models/s3amazonwebservices.md) | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. 
Use this to load files from S3 or S3-compatible services | | -| `region_name` | *Optional[str]* | :heavy_minus_sign: | AWS region where the S3 bucket is located. If not provided, the region will be determined automatically. | | -| `role_arn` | *Optional[str]* | :heavy_minus_sign: | Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. Set the External ID to the Airbyte workspace ID, which can be found in the URL of this page. | | -| `schema` | *Optional[str]* | :heavy_minus_sign: | Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. | {"column_1": "number", "column_2": "string", "column_3": "array", "column_4": "object", "column_5": "boolean"} | -| `source_type` | [models.SourceS3S3](../models/sources3s3.md) | :heavy_check_mark: | N/A | | -| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. | 2021-01-01T00:00:00.000000Z | \ No newline at end of file +| Field | Type | Required | Description | Example | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `bucket` | *str* | :heavy_check_mark: | Name of the S3 bucket where the file(s) exist. | | +| `streams` | List[[models.SourceS3FileBasedStreamConfig](../models/sources3filebasedstreamconfig.md)] | :heavy_check_mark: | Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. 
When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table. | |
+| `aws_access_key_id` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | |
+| `aws_secret_access_key` | *Optional[str]* | :heavy_minus_sign: | In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. | |
+| `endpoint` | *Optional[str]* | :heavy_minus_sign: | Endpoint to an S3 compatible service. Leave empty to use AWS. | my-s3-endpoint.com |
+| `region_name` | *Optional[str]* | :heavy_minus_sign: | AWS region where the S3 bucket is located. If not provided, the region will be determined automatically. | |
+| `role_arn` | *Optional[str]* | :heavy_minus_sign: | Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. Set the External ID to the Airbyte workspace ID, which can be found in the URL of this page. | |
+| `source_type` | [models.SourceS3S3](../models/sources3s3.md) | :heavy_check_mark: | N/A | |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. | 2021-01-01T00:00:00.000000Z |
\ No newline at end of file
diff --git a/docs/models/sources3avroformat.md b/docs/models/sources3avroformat.md
index 9948e0dc..4aaa2d53 100644
--- a/docs/models/sources3avroformat.md
+++ b/docs/models/sources3avroformat.md
@@ -6,4 +6,4 @@

| Field | Type | Required | Description |
| ------------------ | ------------------ | ------------------ | ------------------ |
| `double_as_string` | *Optional[bool]* | :heavy_minus_sign: | Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. |
-| `filetype` | [Optional[models.SourceS3SchemasStreamsFiletype]](../models/sources3schemasstreamsfiletype.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
+| `filetype` | [Optional[models.SourceS3Filetype]](../models/sources3filetype.md) | :heavy_minus_sign: | N/A |
\ No newline at end of file
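The renamed Avro model above plugs into the per-stream `format` field that replaces the deprecated top-level one. A minimal sketch, assuming the generated model names follow the doc filenames (`SourceS3FileBasedStreamConfig`, `SourceS3AvroFormat`), with placeholder values throughout:

```python
from airbyte_api import models

# One stream of Avro files from a private bucket; the stream name and
# every literal value below are illustrative placeholders.
config = models.SourceS3(
    bucket='my-bucket',
    streams=[
        models.SourceS3FileBasedStreamConfig(
            name='events',
            format=models.SourceS3AvroFormat(double_as_string=True),
            globs=['data/**/*.avro'],
        ),
    ],
    aws_access_key_id='...',
    aws_secret_access_key='...',
)
```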
diff --git a/docs/models/sources3csvformat.md b/docs/models/sources3csvformat.md
index 0f96b64d..c11ffc7a 100644
--- a/docs/models/sources3csvformat.md
+++ b/docs/models/sources3csvformat.md
@@ -10,10 +10,9 @@

| `encoding` | *Optional[str]* | :heavy_minus_sign: | The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. |
| `escape_char` | *Optional[str]* | :heavy_minus_sign: | The character used for escaping special characters. To disallow escaping, leave this field blank. |
| `false_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as false values. |
-| `filetype` | [Optional[models.SourceS3SchemasStreamsFormatFiletype]](../models/sources3schemasstreamsformatfiletype.md) | :heavy_minus_sign: | N/A |
+| `filetype` | [Optional[models.SourceS3SchemasFiletype]](../models/sources3schemasfiletype.md) | :heavy_minus_sign: | N/A |
| `header_definition` | [Optional[models.SourceS3CSVHeaderDefinition]](../models/sources3csvheaderdefinition.md) | :heavy_minus_sign: | How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided, and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. |
| `ignore_errors_on_fields_mismatch` | *Optional[bool]* | :heavy_minus_sign: | Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. |
-| `inference_type` | [Optional[models.SourceS3InferenceType]](../models/sources3inferencetype.md) | :heavy_minus_sign: | How to infer the types of the columns. If none, inference default to strings. |
| `null_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. |
| `quote_char` | *Optional[str]* | :heavy_minus_sign: | The character used for quoting CSV values. To disallow quoting, make this field blank. |
| `skip_rows_after_header` | *Optional[int]* | :heavy_minus_sign: | The number of rows to skip after the header row. |
diff --git a/docs/models/sources3filebasedstreamconfig.md b/docs/models/sources3filebasedstreamconfig.md
index 8f1a26a1..39127991 100644
--- a/docs/models/sources3filebasedstreamconfig.md
+++ b/docs/models/sources3filebasedstreamconfig.md
@@ -10,8 +10,6 @@

| `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. |
| `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. |
| `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. |
-| `legacy_prefix` | *Optional[str]* | :heavy_minus_sign: | The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. |
-| `primary_key` | *Optional[str]* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
| |
| `recent_n_files_to_read_for_schema_discovery` | *Optional[int]* | :heavy_minus_sign: | The number of recent files which will be used to discover the schema for this stream. |
| `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. |
| `validation_policy` | [Optional[models.SourceS3ValidationPolicy]](../models/sources3validationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. |
\ No newline at end of file
diff --git a/docs/models/sources3fileformat.md b/docs/models/sources3fileformat.md
deleted file mode 100644
index e8b9547f..00000000
--- a/docs/models/sources3fileformat.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# SourceS3FileFormat
-
-Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate
-
-
-## Supported Types
-
-### `models.Csv`
-
-```python
-value: models.Csv = /* values here */
-```
-
-### `models.Parquet`
-
-```python
-value: models.Parquet = /* values here */
-```
-
-### `models.Avro`
-
-```python
-value: models.Avro = /* values here */
-```
-
-### `models.Jsonl`
-
-```python
-value: models.Jsonl = /* values here */
-```
-
diff --git a/docs/models/sources3filetype.md b/docs/models/sources3filetype.md
index 227197e7..ac50abe1 100644
--- a/docs/models/sources3filetype.md
+++ b/docs/models/sources3filetype.md
@@ -3,6 +3,6 @@

## Values

-| Name | Value |
-| --------- | --------- |
-| `PARQUET` | parquet |
\ No newline at end of file
+| Name | Value |
+| ------ | ------ |
+| `AVRO` | avro |
\ No newline at end of file
diff --git a/docs/models/sources3inferencetype.md b/docs/models/sources3inferencetype.md
deleted file mode 100644
index b85657b7..00000000
--- a/docs/models/sources3inferencetype.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# SourceS3InferenceType
-
-How to infer the types of the columns. If none, inference default to strings.
- - -## Values - -| Name | Value | -| ---------------------- | ---------------------- | -| `NONE` | None | -| `PRIMITIVE_TYPES_ONLY` | Primitive Types Only | \ No newline at end of file diff --git a/docs/models/sources3jsonlformat.md b/docs/models/sources3jsonlformat.md index 0c7ad310..aec256ad 100644 --- a/docs/models/sources3jsonlformat.md +++ b/docs/models/sources3jsonlformat.md @@ -3,6 +3,6 @@ ## Fields -| Field | Type | Required | Description | -| ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------- | -| `filetype` | [Optional[models.SourceS3SchemasStreamsFormatFormatFiletype]](../models/sources3schemasstreamsformatformatfiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------- | +| `filetype` | [Optional[models.SourceS3SchemasStreamsFiletype]](../models/sources3schemasstreamsfiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sources3parquetformat.md b/docs/models/sources3parquetformat.md index 6c035105..52c0f23f 100644 --- a/docs/models/sources3parquetformat.md +++ b/docs/models/sources3parquetformat.md @@ -6,4 +6,4 @@ | Field | Type | Required | Description | | ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | | `decimal_as_float` | *Optional[bool]* | :heavy_minus_sign: | Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. 
| -| `filetype` | [Optional[models.SourceS3SchemasStreamsFormatFormat4Filetype]](../models/sources3schemasstreamsformatformat4filetype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file +| `filetype` | [Optional[models.SourceS3SchemasStreamsFormatFiletype]](../models/sources3schemasstreamsformatfiletype.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/sources3schemasfiletype.md b/docs/models/sources3schemasfiletype.md index 180631d1..f272ce1b 100644 --- a/docs/models/sources3schemasfiletype.md +++ b/docs/models/sources3schemasfiletype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------ | ------ | -| `AVRO` | avro | \ No newline at end of file +| Name | Value | +| ----- | ----- | +| `CSV` | csv | \ No newline at end of file diff --git a/docs/models/sources3schemasstreamsfiletype.md b/docs/models/sources3schemasstreamsfiletype.md index 1888d570..18ffaf75 100644 --- a/docs/models/sources3schemasstreamsfiletype.md +++ b/docs/models/sources3schemasstreamsfiletype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------ | ------ | -| `AVRO` | avro | \ No newline at end of file +| Name | Value | +| ------- | ------- | +| `JSONL` | jsonl | \ No newline at end of file diff --git a/docs/models/sources3schemasstreamsformatfiletype.md b/docs/models/sources3schemasstreamsformatfiletype.md index 98c6564e..616a6c6e 100644 --- a/docs/models/sources3schemasstreamsformatfiletype.md +++ b/docs/models/sources3schemasstreamsformatfiletype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ----- | ----- | -| `CSV` | csv | \ No newline at end of file +| Name | Value | +| --------- | --------- | +| `PARQUET` | parquet | \ No newline at end of file diff --git a/docs/models/sources3schemasstreamsformatformat4filetype.md b/docs/models/sources3schemasstreamsformatformat4filetype.md deleted file mode 100644 index f5aad6d7..00000000 --- a/docs/models/sources3schemasstreamsformatformat4filetype.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourceS3SchemasStreamsFormatFormat4Filetype - - -## Values - -| Name | Value | -| --------- | --------- | -| `PARQUET` | parquet | \ No newline at end of file diff --git a/docs/models/sources3schemasstreamsformatformat5filetype.md b/docs/models/sources3schemasstreamsformatformat5filetype.md deleted file mode 100644 index 9dd124f0..00000000 --- a/docs/models/sources3schemasstreamsformatformat5filetype.md +++ /dev/null @@ -1,8 +0,0 @@ -# SourceS3SchemasStreamsFormatFormat5Filetype - - -## Values - -| Name | Value | -| -------------- | -------------- | -| `UNSTRUCTURED` | unstructured | \ No newline at end of file diff --git a/docs/models/sources3schemasstreamsformatformatfiletype.md b/docs/models/sources3schemasstreamsformatformatfiletype.md index ed56cf3b..a3b343ce 100644 --- a/docs/models/sources3schemasstreamsformatformatfiletype.md +++ b/docs/models/sources3schemasstreamsformatformatfiletype.md @@ -3,6 +3,6 @@ ## Values -| Name | Value | -| ------- | ------- | -| `JSONL` | jsonl | \ No newline at end of file +| Name | Value | +| -------------- | -------------- | +| `UNSTRUCTURED` | unstructured | \ No newline at end of file diff --git a/docs/models/sources3unstructureddocumentformat.md b/docs/models/sources3unstructureddocumentformat.md index db1eacea..17e181f8 100644 --- a/docs/models/sources3unstructureddocumentformat.md +++ b/docs/models/sources3unstructureddocumentformat.md @@ -7,7 +7,7 @@ Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one rec | Field | Type | Required | Description | | 
| ---------- | ---------- | ---------- | ---------- |
-| `filetype` | [Optional[models.SourceS3SchemasStreamsFormatFormat5Filetype]](../models/sources3schemasstreamsformatformat5filetype.md) | :heavy_minus_sign: | N/A |
+| `filetype` | [Optional[models.SourceS3SchemasStreamsFormatFormatFiletype]](../models/sources3schemasstreamsformatformatfiletype.md) | :heavy_minus_sign: | N/A |
| `processing` | [Optional[models.SourceS3Processing]](../models/sources3processing.md) | :heavy_minus_sign: | Processing configuration |
| `skip_unprocessable_files` | *Optional[bool]* | :heavy_minus_sign: | If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. |
| `strategy` | [Optional[models.SourceS3ParsingStrategy]](../models/sources3parsingstrategy.md) | :heavy_minus_sign: | The strategy used to parse documents. `fast` extracts text directly from the document which doesn't work for all files. `ocr_only` is more reliable, but slower. `hi_res` is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf |
\ No newline at end of file
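The `strategy` trade-off above is the main knob for this format. A hedged sketch of selecting it via the SDK models — the enum member name `FAST` is assumed from the documented `fast` value:

```python
from airbyte_api import models

# Extract text quickly and skip unparseable files instead of failing
# the sync; FAST is an assumed member of SourceS3ParsingStrategy.
fmt = models.SourceS3UnstructuredDocumentFormat(
    strategy=models.SourceS3ParsingStrategy.FAST,
    skip_unprocessable_files=True,
)
```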
diff --git a/docs/models/sourcesenseforce.md b/docs/models/sourcesenseforce.md
index f8eae00d..d951f9dc 100644
--- a/docs/models/sourcesenseforce.md
+++ b/docs/models/sourcesenseforce.md
@@ -3,11 +3,10 @@

## Fields

-| Field | Type | Required | Description | Example |
-| ---------- | ---------- | ---------- | ---------- | ---------- |
-| 
`access_token` | *str* | :heavy_check_mark: | Your API access token. See here. The toke is case sensitive. | | -| `backend_url` | *str* | :heavy_check_mark: | Your Senseforce API backend URL. This is the URL shown during the Login screen. See here for more details. (Note: Most Senseforce backend APIs have the term 'galaxy' in their ULR) | https://galaxyapi.senseforce.io | -| `dataset_id` | *str* | :heavy_check_mark: | The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source). | 8f418098-ca28-4df5-9498-0df9fe78eda7 | -| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_check_mark: | UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later | 2017-01-25 | -| `slice_range` | *Optional[int]* | :heavy_minus_sign: | The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limites. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more. 
| 1 |
-| `source_type` | [models.Senseforce](../models/senseforce.md) | :heavy_check_mark: | N/A | |
\ No newline at end of file
+| Field | Type | Required | Description | Example |
+| ---------- | ---------- | ---------- | ---------- | ---------- |
+| `access_token` | *str* | :heavy_check_mark: | Your API access token. See here. The token is case sensitive. | |
+| `backend_url` | *str* | :heavy_check_mark: | Your Senseforce API backend URL. This is the URL shown during the Login screen. See here for more details. (Note: Most Senseforce backend APIs have the term 'galaxy' in their URL) | https://galaxyapi.senseforce.io |
+| `dataset_id` | *str* | :heavy_check_mark: | The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows synchronizing a specific dataset, each dataset you want to synchronize needs to be implemented as a separate Airbyte source). | 8f418098-ca28-4df5-9498-0df9fe78eda7 |
+| `start_date` | [datetime](https://docs.python.org/3/library/datetime.html#datetime-objects) | :heavy_check_mark: | UTC date and time in the format 2017-01-25. Only data with "Timestamp" after this date will be replicated. Important note: This start date must be set to the first day for which your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later | 2017-01-25 |
+| `source_type` | [models.Senseforce](../models/senseforce.md) | :heavy_check_mark: | N/A | |
\ No newline at end of file
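For orientation, the table above maps onto a config object roughly like the following sketch; the values are the documented examples, not working credentials:

```python
import dateutil.parser
from airbyte_api import models

# start_date is a datetime per the field type above; access_token is
# a placeholder.
config = models.SourceSenseforce(
    access_token='...',
    backend_url='https://galaxyapi.senseforce.io',
    dataset_id='8f418098-ca28-4df5-9498-0df9fe78eda7',
    start_date=dateutil.parser.parse('2017-01-25'),
)
```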
diff --git a/docs/models/sourcesftpbulkcsvformat.md b/docs/models/sourcesftpbulkcsvformat.md
index 78971904..46265058 100644
--- a/docs/models/sourcesftpbulkcsvformat.md
+++ b/docs/models/sourcesftpbulkcsvformat.md
@@ -13,7 +13,6 @@

| `filetype` | [Optional[models.SourceSftpBulkSchemasFiletype]](../models/sourcesftpbulkschemasfiletype.md) | :heavy_minus_sign: | N/A |
| `header_definition` | [Optional[models.SourceSftpBulkCSVHeaderDefinition]](../models/sourcesftpbulkcsvheaderdefinition.md) | :heavy_minus_sign: | How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided, and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. |
| `ignore_errors_on_fields_mismatch` | *Optional[bool]* | :heavy_minus_sign: | Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. |
-| `inference_type` | [Optional[models.SourceSftpBulkInferenceType]](../models/sourcesftpbulkinferencetype.md) | :heavy_minus_sign: | How to infer the types of the columns. If none, inference default to strings. |
| `null_values` | List[*str*] | :heavy_minus_sign: | A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. |
| `quote_char` | *Optional[str]* | :heavy_minus_sign: | The character used for quoting CSV values. To disallow quoting, make this field blank. |
| `skip_rows_after_header` | *Optional[int]* | :heavy_minus_sign: | The number of rows to skip after the header row. |
diff --git a/docs/models/sourcesftpbulkfilebasedstreamconfig.md b/docs/models/sourcesftpbulkfilebasedstreamconfig.md
index 547083c1..28a13f50 100644
--- a/docs/models/sourcesftpbulkfilebasedstreamconfig.md
+++ b/docs/models/sourcesftpbulkfilebasedstreamconfig.md
@@ -10,8 +10,6 @@

| `days_to_sync_if_history_is_full` | *Optional[int]* | :heavy_minus_sign: | When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. |
| `globs` | List[*str*] | :heavy_minus_sign: | The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. |
| `input_schema` | *Optional[str]* | :heavy_minus_sign: | The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. |
-| `legacy_prefix` | *Optional[str]* | :heavy_minus_sign: | The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. |
-| `primary_key` | *Optional[str]* | :heavy_minus_sign: | The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.
| |
| `recent_n_files_to_read_for_schema_discovery` | *Optional[int]* | :heavy_minus_sign: | The number of recent files which will be used to discover the schema for this stream. |
| `schemaless` | *Optional[bool]* | :heavy_minus_sign: | When enabled, syncs will not validate or structure records against the stream's schema. |
| `validation_policy` | [Optional[models.SourceSftpBulkValidationPolicy]](../models/sourcesftpbulkvalidationpolicy.md) | :heavy_minus_sign: | The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. |
\ No newline at end of file
diff --git a/docs/models/sourcesftpbulkinferencetype.md b/docs/models/sourcesftpbulkinferencetype.md
deleted file mode 100644
index 57571b94..00000000
--- a/docs/models/sourcesftpbulkinferencetype.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# SourceSftpBulkInferenceType
-
-How to infer the types of the columns. If none, inference default to strings.
-
-
-## Values
-
-| Name | Value |
-| ---------------------- | ---------------------- |
-| `NONE` | None |
-| `PRIMITIVE_TYPES_ONLY` | Primitive Types Only |
\ No newline at end of file
diff --git a/docs/models/sourceshortcut.md b/docs/models/sourceshortcut.md
new file mode 100644
index 00000000..e01d4307
--- /dev/null
+++ b/docs/models/sourceshortcut.md
@@ -0,0 +1,11 @@
+# SourceShortcut
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------- | ---------- | ---------- | ---------- |
+| `api_key_2` | *str* | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `query` | *Optional[str]* | :heavy_minus_sign: | Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators` |
+| `source_type` | [models.Shortcut](../models/shortcut.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesmartsheets.md b/docs/models/sourcesmartsheets.md
index 6f4932b7..e28e9f0f 100644
--- a/docs/models/sourcesmartsheets.md
+++ b/docs/models/sourcesmartsheets.md
@@ -3,10 +3,9 @@

## Fields

-| Field | Type | Required | Description | Example |
-| ---------- | ---------- | ---------- | ---------- | ---------- |
-| `credentials` | [models.SourceSmartsheetsAuthorizationMethod](../models/sourcesmartsheetsauthorizationmethod.md) | :heavy_check_mark: | N/A | |
-| `spreadsheet_id` | 
*str* | :heavy_check_mark: | The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties | |
-| `metadata_fields` | List[[models.Validenums](../models/validenums.md)] | :heavy_minus_sign: | A List of available columns which metadata can be pulled from. | |
-| `source_type` | [models.SourceSmartsheetsSmartsheets](../models/sourcesmartsheetssmartsheets.md) | :heavy_check_mark: | N/A | |
-| `start_datetime` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00` | 2000-01-01T13:00:00 |
\ No newline at end of file
+| Field | Type | Required | Description |
+| ---------- | ---------- | ---------- | ---------- |
+| `credentials` | [models.SourceSmartsheetsAuthorizationMethod](../models/sourcesmartsheetsauthorizationmethod.md) | :heavy_check_mark: | N/A |
+| `spreadsheet_id` | *str* | :heavy_check_mark: | The spreadsheet ID. Find it by opening the spreadsheet then navigating to File > Properties |
+| `metadata_fields` | List[[models.Validenums](../models/validenums.md)] | :heavy_minus_sign: | A list of available columns which metadata can be pulled from. |
+| `source_type` | [models.SourceSmartsheetsSmartsheets](../models/sourcesmartsheetssmartsheets.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcesurvicate.md b/docs/models/sourcesurvicate.md
new file mode 100644
index 00000000..c6baf7b0
--- /dev/null
+++ b/docs/models/sourcesurvicate.md
@@ -0,0 +1,10 @@
+# SourceSurvicate
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------- | ---------- | ---------- | ---------- |
+| `api_key` | *str* | :heavy_check_mark: | N/A |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `source_type` | [models.Survicate](../models/survicate.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourceteamwork.md b/docs/models/sourceteamwork.md
new file mode 100644
index 00000000..39f01560
--- /dev/null
+++ b/docs/models/sourceteamwork.md
@@ -0,0 +1,12 @@
+# SourceTeamwork
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------- | ---------- | ---------- | ---------- |
+| `site_name` | *str* | :heavy_check_mark: | The Teamwork site name that appears in the URL |
+| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_check_mark: | N/A |
+| `username` | *str* | :heavy_check_mark: | N/A |
+| `password` | *Optional[str]* | :heavy_minus_sign: | N/A |
+| `source_type` | [models.Teamwork](../models/teamwork.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
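Like the other sources added in this patch, the generated model is a plain dataclass. A sketch with placeholder values:

```python
import dateutil.parser
from airbyte_api import models

# site_name is the subdomain from https://<site_name>.teamwork.com
# (hypothetical value); start_date is a date object per the table.
config = models.SourceTeamwork(
    site_name='mycompany',
    start_date=dateutil.parser.parse('2024-01-01').date(),
    username='user@example.com',
    password='...',
)
```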
diff --git a/docs/models/sourcewheniwork.md b/docs/models/sourcewheniwork.md
new file mode 100644
index 00000000..7124718a
--- /dev/null
+++ b/docs/models/sourcewheniwork.md
@@ -0,0 +1,10 @@
+# SourceWhenIWork
+
+
+## Fields
+
+| Field | Type | Required | Description |
+| ---------- | ---------- | ---------- | ---------- |
+| `email` | *str* | :heavy_check_mark: | Email of your when-i-work account |
+| `password` | *str* | :heavy_check_mark: | Password for your when-i-work account |
+| `source_type` | [models.WhenIWork](../models/wheniwork.md) | :heavy_check_mark: | N/A |
\ No newline at end of file
diff --git a/docs/models/sourcezendesksell.md b/docs/models/sourcezendesksell.md
deleted file mode 100644
index 1e6d534d..00000000
--- a/docs/models/sourcezendesksell.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# SourceZendeskSell
-
-
-## Fields
-
-| Field | Type | Required | Description | Example |
-| ---------- | ---------- | ---------- | ---------- | ---------- |
-| `api_token` | *str* | :heavy_check_mark: | The API token for authenticating to Zendesk Sell | f23yhd630otl94y85a8bf384958473pto95847fd006da49382716or937ruw059 |
-| `source_type` | [models.ZendeskSell](../models/zendesksell.md) | :heavy_check_mark: | N/A | |
\ No newline at end of file
diff --git a/docs/models/sourcezendesksupport.md b/docs/models/sourcezendesksupport.md
index c5fe26cb..ea055433 100644
--- a/docs/models/sourcezendesksupport.md
+++ b/docs/models/sourcezendesksupport.md
@@ -7,6 +7,5 @@

| ---------- | ---------- | ---------- | ---------- | ---------- |
| `subdomain` | *str* | :heavy_check_mark: | This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain. | |
| `credentials` | [Optional[models.SourceZendeskSupportAuthentication]](../models/sourcezendesksupportauthentication.md) | :heavy_minus_sign: | Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. | |
-| `ignore_pagination` | *Optional[bool]* | :heavy_minus_sign: | Makes each stream read a single page of data.
| |
| `source_type` | [models.SourceZendeskSupportZendeskSupport](../models/sourcezendesksupportzendesksupport.md) | :heavy_check_mark: | N/A | |
| `start_date` | [date](https://docs.python.org/3/library/datetime.html#date-objects) | :heavy_minus_sign: | The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. | 2020-10-15T00:00:00Z |
\ No newline at end of file
diff --git a/docs/models/sslmethod.md b/docs/models/sslmethod.md
index fe584132..9fe0464b 100644
--- a/docs/models/sslmethod.md
+++ b/docs/models/sslmethod.md
@@ -5,6 +5,12 @@ The encryption method which is used to communicate with the database.

## Supported Types

+### `models.Unencrypted`
+
+```python
+value: models.Unencrypted = /* values here */
+```
+
### `models.EncryptedTrustServerCertificate`

```python
diff --git a/docs/models/standalonemongodbinstance.md b/docs/models/standalonemongodbinstance.md
index 632e0f70..29a5c8dd 100644
--- a/docs/models/standalonemongodbinstance.md
+++ b/docs/models/standalonemongodbinstance.md
@@ -3,8 +3,9 @@

## Fields

-| Field | Type | Required | Description | Example |
-| ---------- | ---------- | ---------- | ---------- | ---------- |
-| `host` | *str* | :heavy_check_mark: | The Host of a Mongo database to be replicated. | |
-| `instance` | [Optional[models.Instance]](../models/instance.md) | :heavy_minus_sign: | N/A | |
-| `port` | *Optional[int]* | :heavy_minus_sign: | The Port of a Mongo database to be replicated. | 27017 |
\ No newline at end of file
+| Field | Type | Required | Description | Example |
+| ---------- | ---------- | ---------- | ---------- | ---------- |
+| `host` | *str* | :heavy_check_mark: | The Host of a Mongo database to be replicated. | |
+| `instance` | [Optional[models.Instance]](../models/instance.md) | :heavy_minus_sign: | N/A | |
+| `port` | *Optional[int]* | :heavy_minus_sign: | The Port of a Mongo database to be replicated. | 27017 |
+| `tls` | *Optional[bool]* | :heavy_minus_sign: | Indicates whether the TLS encryption protocol will be used to connect to MongoDB. It is recommended to use a TLS connection if possible. For more information see documentation. | |
\ No newline at end of file
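The new `tls` flag sits alongside host and port in the instance model. A minimal sketch, assuming the generated class is named `StandaloneMongoDbInstance` after the doc filename:

```python
from airbyte_api import models

# A standalone MongoDB instance reached over TLS on the default port;
# the hostname is a placeholder.
instance = models.StandaloneMongoDbInstance(
    host='mongo.example.internal',
    port=27017,
    tls=True,
)
```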
diff --git a/docs/models/storageprovider.md b/docs/models/storageprovider.md
index e5323533..697335e8 100644
--- a/docs/models/storageprovider.md
+++ b/docs/models/storageprovider.md
@@ -17,10 +17,10 @@ value: models.HTTPSPublicWeb = /* values here */
value: models.GCSGoogleCloudStorage = /* values here */
```

-### `models.SourceFileS3AmazonWebServices`
+### `models.S3AmazonWebServices`

```python
-value: models.SourceFileS3AmazonWebServices = /* values here */
+value: models.S3AmazonWebServices = /* values here */
```

### `models.AzBlobAzureBlobStorage`
@@ -47,3 +47,9 @@ value: models.SCPSecureCopyProtocol = /* values here */
value: models.SFTPSecureFileTransferProtocol = /* values here */
```

+### `models.LocalFilesystemLimited`
+
+```python
+value: models.LocalFilesystemLimited = /* values here */
+```
+
diff --git a/docs/models/survicate.md b/docs/models/survicate.md
new file mode 100644
index 00000000..2c24f481
--- /dev/null
+++ b/docs/models/survicate.md
@@ -0,0 +1,8 @@
+# Survicate
+
+
+## Values
+
+| Name | Value |
+| ----------- | ----------- |
+| `SURVICATE` | survicate |
\ No newline at end of file
diff --git a/docs/models/targetstype.md b/docs/models/targetstype.md
new file mode 100644
index 00000000..26fcd884
--- /dev/null
+++ b/docs/models/targetstype.md
@@ -0,0 +1,11 @@
+# TargetsType
+
+
+## Values
+
+| Name | Value |
+| ---------------------------- | ---------------------------- |
+| `WSN_STATION_NUMBERS` | WSN station numbers |
+| `CALIFORNIA_ZIP_CODES` | California zip codes |
+| `DECIMAL_DEGREE_COORDINATES` | decimal-degree coordinates |
+| `STREET_ADDRESSES` | street addresses |
\ No newline at end of file
diff --git a/docs/models/teamwork.md b/docs/models/teamwork.md
new file mode 100644
index 00000000..d439de11
--- /dev/null
+++ b/docs/models/teamwork.md
@@ -0,0 +1,8 @@
+# Teamwork
+
+
+## Values
+
+| Name | Value |
+| ---------- | ---------- |
+| `TEAMWORK` | teamwork |
\ No newline at end of file
diff --git a/docs/models/testdestination.md b/docs/models/testdestination.md
deleted file mode 100644
index 0c251056..00000000
--- a/docs/models/testdestination.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# TestDestination
-
-The type of destination to be used
-
-
-## Supported Types
-
-### `models.Silent`
-
-```python
-value: models.Silent = /* values here */
-```
-
diff --git a/docs/models/tlsencryptedverifycertificate.md b/docs/models/tlsencryptedverifycertificate.md
index 41dfe16b..40f6f189 100644
--- a/docs/models/tlsencryptedverifycertificate.md
+++ b/docs/models/tlsencryptedverifycertificate.md
@@ -8,4 +8,4 @@ Verify and use the certificate provided by the server.

| Field | Type | Required | Description |
| ---------- | ---------- | ---------- | ---------- |
| `ssl_certificate` | *str* | :heavy_check_mark: | Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.
| -| `encryption_method` | [models.SourceOracleEncryptionMethod](../models/sourceoracleencryptionmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| `encryption_method` | [Optional[models.DestinationOracleSchemasEncryptionMethod]](../models/destinationoracleschemasencryptionmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/type.md b/docs/models/type.md deleted file mode 100644 index 1e56d586..00000000 --- a/docs/models/type.md +++ /dev/null @@ -1,8 +0,0 @@ -# Type - - -## Values - -| Name | Value | -| ----------------- | ----------------- | -| `CONTINUOUS_FEED` | CONTINUOUS_FEED | \ No newline at end of file diff --git a/docs/models/unencrypted.md b/docs/models/unencrypted.md index 28c5d145..5790f31b 100644 --- a/docs/models/unencrypted.md +++ b/docs/models/unencrypted.md @@ -1,10 +1,10 @@ # Unencrypted -Data transfer will not be encrypted. +The data transfer will not be encrypted. ## Fields -| Field | Type | Required | Description | -| ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | ------------------------------------------------------------------------------ | -| `ssl_method` | [models.SourceMssqlSchemasSslMethod](../models/sourcemssqlschemassslmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------ | +| `ssl_method` | [Optional[models.DestinationMssqlSslMethod]](../models/destinationmssqlsslmethod.md) | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/docs/models/unexpectedfieldbehavior.md b/docs/models/unexpectedfieldbehavior.md deleted file mode 100644 index dc09c75a..00000000 --- a/docs/models/unexpectedfieldbehavior.md +++ /dev/null @@ -1,12 +0,0 @@ -# UnexpectedFieldBehavior - -How JSON fields outside of explicit_schema (if given) are treated. 
Check PyArrow documentation for details - - -## Values - -| Name | Value | -| -------- | -------- | -| `IGNORE` | ignore | -| `INFER` | infer | -| `ERROR` | error | \ No newline at end of file diff --git a/docs/models/unitofmeasure.md b/docs/models/unitofmeasure.md new file mode 100644 index 00000000..01c03612 --- /dev/null +++ b/docs/models/unitofmeasure.md @@ -0,0 +1,9 @@ +# UnitOfMeasure + + +## Values + +| Name | Value | +| ----- | ----- | +| `E` | E | +| `M` | M | \ No newline at end of file diff --git a/docs/models/usernamepassword.md b/docs/models/usernamepassword.md index 2aedc445..a8c301ba 100644 --- a/docs/models/usernamepassword.md +++ b/docs/models/usernamepassword.md @@ -5,8 +5,8 @@ Basic auth header with a username and password ## Fields -| Field | Type | Required | Description | -| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | -| `password` | *str* | :heavy_check_mark: | Basic auth password to access a secure Elasticsearch server | -| `username` | *str* | :heavy_check_mark: | Basic auth username to access a secure Elasticsearch server | -| `method` | [models.DestinationElasticsearchSchemasMethod](../models/destinationelasticsearchschemasmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file +| Field | Type | Required | Description | +| ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------ | +| `password` | *str* | :heavy_check_mark: | Basic auth password to access a secure Elasticsearch server | +| `username` | *str* | :heavy_check_mark: | Basic auth username to access a secure Elasticsearch server | +| `method` | [models.DestinationElasticsearchSchemasAuthenticationMethodMethod](../models/destinationelasticsearchschemasauthenticationmethodmethod.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/docs/models/wheniwork.md b/docs/models/wheniwork.md new file mode 100644 index 00000000..af9c1019 --- /dev/null +++ b/docs/models/wheniwork.md @@ -0,0 +1,8 @@ +# WhenIWork + + +## Values + +| Name | Value | +| ------------- | ------------- | +| `WHEN_I_WORK` | when-i-work | \ No newline at end of file diff --git a/docs/sdks/connections/README.md b/docs/sdks/connections/README.md index e9c6b40d..386080a8 100644 --- a/docs/sdks/connections/README.md +++ b/docs/sdks/connections/README.md @@ -163,7 +163,7 @@ List connections ```python import airbyte_api -from airbyte_api import models +from airbyte_api import api, models s = airbyte_api.AirbyteAPI( security=models.Security( @@ -175,7 +175,7 @@ s = airbyte_api.AirbyteAPI( ) -res = s.connections.list_connections() +res = s.connections.list_connections(request=api.ListConnectionsRequest()) if res.connections_response is not None: # handle response diff 
index cdeedbe7..7552706f 100644
--- a/docs/sdks/destinations/README.md
+++ b/docs/sdks/destinations/README.md
@@ -34,21 +34,19 @@ s = airbyte_api.AirbyteAPI(
 
 res = s.destinations.create_destination(request=models.DestinationCreateRequest(
     configuration=models.DestinationPinecone(
-        embedding=models.DestinationPineconeOpenAI(
-            openai_key='',
-        ),
+        embedding=models.DestinationPineconeFake(),
         indexing=models.DestinationPineconeIndexing(
             index='',
             pinecone_environment='us-west1-gcp',
             pinecone_key='',
         ),
         processing=models.DestinationPineconeProcessingConfigModel(
-            chunk_size=45493,
+            chunk_size=834173,
             metadata_fields=[
                 'user',
             ],
             text_fields=[
-                'user.name',
+                'users.*.name',
             ],
         ),
     ),
@@ -181,7 +179,7 @@ List destinations
 
 ```python
 import airbyte_api
-from airbyte_api import models
+from airbyte_api import api, models
 
 s = airbyte_api.AirbyteAPI(
     security=models.Security(
@@ -193,7 +191,7 @@
 )
 
 
-res = s.destinations.list_destinations()
+res = s.destinations.list_destinations(request=api.ListDestinationsRequest())
 
 if res.destinations_response is not None:
     # handle response
@@ -241,8 +239,9 @@ s = airbyte_api.AirbyteAPI(
 
 res = s.destinations.patch_destination(request=api.PatchDestinationRequest(
    destination_id='',
    destination_patch_request=models.DestinationPatchRequest(
-        configuration=models.DestinationDevNull(
-            test_destination=models.Silent(),
+        configuration=models.DestinationDuckdb(
+            destination_path='motherduck:',
+            schema='main',
        ),
        name='My Destination',
    ),
 ))
@@ -296,8 +295,8 @@ res = s.destinations.put_destination(request=api.PutDestinationRequest(
    destination_put_request=models.DestinationPutRequest(
        configuration=models.DestinationClickhouse(
            database='',
-            host='aggravating-town.info',
-            username='Myah.Spencer55',
+            host='urban-receptor.org',
+            username='Kaylie_Terry',
            port=8123,
            tunnel_method=models.SSHKeyAuthentication(
                ssh_key='',
diff --git a/docs/sdks/permissions/README.md b/docs/sdks/permissions/README.md
index db857a53..b09dd055 100644
--- a/docs/sdks/permissions/README.md
+++ b/docs/sdks/permissions/README.md
@@ -162,7 +162,7 @@ List Permissions by user id
 
 ```python
 import airbyte_api
-from airbyte_api import models
+from airbyte_api import api, models
 
 s = airbyte_api.AirbyteAPI(
     security=models.Security(
@@ -174,7 +174,7 @@
 )
 
 
-res = s.permissions.list_permissions()
+res = s.permissions.list_permissions(request=api.ListPermissionsRequest())
 
 if res.permissions_response is not None:
     # handle response
diff --git a/docs/sdks/sources/README.md b/docs/sdks/sources/README.md
index d9dc4899..283d8593 100644
--- a/docs/sdks/sources/README.md
+++ b/docs/sdks/sources/README.md
@@ -21,6 +21,7 @@ Creates a source given a name, workspace id, and a json blob containing the conf
 
 ```python
 import airbyte_api
+import dateutil.parser
 from airbyte_api import models
 
 s = airbyte_api.AirbyteAPI(
@@ -34,10 +35,33 @@
 
 res = s.sources.create_source(request=models.SourceCreateRequest(
-    configuration=models.SourcePocket(
-        access_token='',
-        consumer_key='',
-        since='2022-10-20 14:14:14',
+    configuration=models.SourcePinterest(
+        custom_reports=[
+            models.ReportConfig(
+                columns=[
+                    models.SourcePinterestSchemasValidEnums.TOTAL_REPIN_RATE,
+                ],
+                name='',
+                start_date=dateutil.parser.parse('2022-07-28').date(),
+            ),
+            models.ReportConfig(
+                columns=[
+                    models.SourcePinterestSchemasValidEnums.TOTAL_VIEW_LEAD,
+                ],
+                name='',
+                start_date=dateutil.parser.parse('2022-07-28').date(),
+            ),
+            models.ReportConfig(
+                columns=[
+                    models.SourcePinterestSchemasValidEnums.TOTAL_WEB_ENGAGEMENT_CHECKOUT,
+                    models.SourcePinterestSchemasValidEnums.TOTAL_VIEW_LEAD,
+                    models.SourcePinterestSchemasValidEnums.TOTAL_ENGAGEMENT_CHECKOUT,
+                ],
+                name='',
+                start_date=dateutil.parser.parse('2022-07-28').date(),
+            ),
+        ],
+        start_date=dateutil.parser.parse('2022-07-28').date(),
    ),
    name='My Source',
    workspace_id='744cc0ed-7f05-4949-9e60-2a814f90c035',
 ))
@@ -270,7 +294,6 @@ Update a Source
 ```python
 import airbyte_api
-import dateutil.parser
 from airbyte_api import api, models
 
 s = airbyte_api.AirbyteAPI(
     security=models.Security(
@@ -286,10 +309,8 @@
 res = s.sources.patch_source(request=api.PatchSourceRequest(
    source_id='',
    source_patch_request=models.SourcePatchRequest(
-        configuration=models.SourceExchangeRates(
-            access_key='',
-            start_date=dateutil.parser.parse('YYYY-MM-DD').date(),
-            base='EUR',
+        configuration=models.SourceDremio(
+            api_key='',
        ),
        name='My Source',
        workspace_id='744cc0ed-7f05-4949-9e60-2a814f90c035',
    ),
 ))
@@ -327,6 +348,7 @@ Update a Source and fully overwrite it
 
 ```python
 import airbyte_api
+import dateutil.parser
 from airbyte_api import api, models
 
 s = airbyte_api.AirbyteAPI(
@@ -342,7 +364,10 @@
 res = s.sources.put_source(request=api.PutSourceRequest(
    source_id='',
    source_put_request=models.SourcePutRequest(
-        configuration=models.SourceHardcodedRecords(),
+        configuration=models.SourceGoogleTasks(
+            api_key='',
+            start_date=dateutil.parser.isoparse('2024-10-11T13:59:33.977Z'),
+        ),
        name='My Source',
    ),
 ))
diff --git a/docs/sdks/workspaces/README.md b/docs/sdks/workspaces/README.md
index 66ac4f65..91ee078c 100644
--- a/docs/sdks/workspaces/README.md
+++ b/docs/sdks/workspaces/README.md
@@ -37,8 +37,8 @@ res = s.workspaces.create_or_update_workspace_o_auth_credentials(request=api.Cre
    workspace_o_auth_credentials_request=models.WorkspaceOAuthCredentialsRequest(
        actor_type=models.ActorTypeEnum.DESTINATION,
        configuration={
-        'user': 'charles',
-    },
+            'user': 'charles',
+        },
        name=models.OAuthActorNames.AMAZON_ADS,
    ),
    workspace_id='',
@@ -216,7 +216,7 @@ List workspaces
 
 ```python
 import airbyte_api
-from airbyte_api import models
+from airbyte_api import api, models
 
 s = airbyte_api.AirbyteAPI(
     security=models.Security(
@@ -228,7 +228,7 @@
 )
 
 
-res = s.workspaces.list_workspaces()
+res = s.workspaces.list_workspaces(request=api.ListWorkspacesRequest())
 
 if res.workspaces_response is not None:
    # handle response
diff --git a/gen.yaml b/gen.yaml
index 8ae06e0a..78f534d1 100644
--- a/gen.yaml
+++ b/gen.yaml
@@ -11,7 +11,7 @@ generation:
   auth:
     oAuth2ClientCredentialsEnabled: true
 python:
-  version: 0.52.0
+  version: 0.52.1
   additionalDependencies:
     dependencies: {}
     extraDependencies:
diff --git a/setup.py b/setup.py
index ddd5ba7b..cd924ae9 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
 setuptools.setup(
     name='airbyte-api',
-    version='0.52.0',
+    version='0.52.1',
     author='Airbyte',
     description='Python Client SDK for Airbyte API',
     url='https://github.com/airbytehq/airbyte-api-python-sdk.git',
diff --git a/src/airbyte_api/models/__init__.py b/src/airbyte_api/models/__init__.py
index 001375fd..01b10504 100644
--- a/src/airbyte_api/models/__init__.py
+++ b/src/airbyte_api/models/__init__.py
@@ -22,7 +22,6 @@
 from .destination_clickhouse import *
 from .destination_convex import *
 from .destination_databricks import *
-from .destination_dev_null import *
 from .destination_duckdb import *
 from .destination_dynamodb import *
 from .destination_elasticsearch import *
@@ -35,6 +34,7 @@
 from .destination_mssql import *
 from .destination_mysql import *
 from .destination_oracle import *
+from .destination_pgvector import *
 from .destination_pinecone import *
 from .destination_postgres import *
 from .destination_pubsub import *
@@ -115,6 +115,7 @@
 from .smartsheets import *
 from .snapchat_marketing import *
 from .snowflake import *
+from .source_7shifts import *
 from .source_aha import *
 from .source_airbyte import *
 from .source_aircall import *
@@ -124,6 +125,8 @@
 from .source_amazon_sqs import *
 from .source_amplitude import *
 from .source_apify_dataset import *
+from .source_appcues import *
+from .source_appfigures import *
 from .source_appfollow import *
 from .source_asana import *
 from .source_auth0 import *
@@ -134,13 +137,20 @@
 from .source_basecamp import *
 from .source_bigquery import *
 from .source_bing_ads import *
+from .source_bitly import *
 from .source_braintree import *
 from .source_braze import *
 from .source_breezy_hr import *
+from .source_brevo import *
+from .source_buildkite import *
+from .source_buzzsprout import *
 from .source_calendly import *
+from .source_canny import *
 from .source_cart import *
+from .source_chameleon import *
 from .source_chargebee import *
 from .source_chartmogul import *
+from .source_cimis import *
 from .source_clazar import *
 from .source_clickhouse import *
 from .source_clickup_api import *
@@ -161,9 +171,9 @@
 from .source_dockerhub import *
 from .source_dremio import *
 from .source_dynamodb import *
-from .source_e2e_test_cloud import *
 from .source_emailoctopus import *
 from .source_exchange_rates import *
+from .source_ezofficeinventory import *
 from .source_facebook_marketing import *
 from .source_faker import *
 from .source_fauna import *
@@ -173,6 +183,7 @@
 from .source_freshcaller import *
 from .source_freshdesk import *
 from .source_freshsales import *
+from .source_front import *
 from .source_gainsight_px import *
 from .source_gcs import *
 from .source_getlago import *
@@ -188,11 +199,14 @@
 from .source_google_pagespeed_insights import *
 from .source_google_search_console import *
 from .source_google_sheets import *
+from .source_google_tasks import *
 from .source_google_webfonts import *
 from .source_greenhouse import *
 from .source_gridly import *
+from .source_guru import *
 from .source_hardcoded_records import *
 from .source_harvest import *
+from .source_height import *
 from .source_hibob import *
 from .source_high_level import *
 from .source_hubplanner import *
@@ -204,6 +218,7 @@
 from .source_ip2whois import *
 from .source_iterable import *
 from .source_jira import *
+from .source_jotform import *
 from .source_k6_cloud import *
 from .source_klarna import *
 from .source_klaviyo import *
@@ -235,6 +250,7 @@
 from .source_netsuite import *
 from .source_northpass_lms import *
 from .source_notion import *
+from .source_nylas import *
 from .source_nytimes import *
 from .source_okta import *
 from .source_omnisend import *
@@ -250,8 +266,10 @@
 from .source_pennylane import *
 from .source_persistiq import *
 from .source_pexels_api import *
+from .source_picqer import *
 from .source_pinterest import *
 from .source_pipedrive import *
+from .source_piwik import *
 from .source_planhat import *
 from .source_pocket import *
 from .source_pokeapi import *
@@ -260,6 +278,8 @@
 from .source_posthog import *
 from .source_postmarkapp import *
 from .source_prestashop import *
+from .source_productboard import *
+from
.source_productive import * from .source_pypi import * from .source_qualaroo import * from .source_railz import * @@ -286,6 +306,7 @@ from .source_sftp import * from .source_sftp_bulk import * from .source_shopify import * +from .source_shortcut import * from .source_shortio import * from .source_slack import * from .source_smaily import * @@ -300,6 +321,8 @@ from .source_stripe import * from .source_survey_sparrow import * from .source_surveymonkey import * +from .source_survicate import * +from .source_teamwork import * from .source_tempo import * from .source_the_guardian_api import * from .source_tiktok_marketing import * @@ -313,6 +336,7 @@ from .source_us_census import * from .source_vantage import * from .source_webflow import * +from .source_when_i_work import * from .source_whisky_hunter import * from .source_wikipedia_pageviews import * from .source_woocommerce import * @@ -321,7 +345,6 @@ from .source_yotpo import * from .source_youtube_analytics import * from .source_zendesk_chat import * -from .source_zendesk_sell import * from .source_zendesk_sunshine import * from .source_zendesk_support import * from .source_zendesk_talk import * @@ -354,4 +377,4 @@ from .zendesk_support import * from .zendesk_talk import * -__all__ = ["APIAccessToken","APIKey","APIKeyAuth","APIKeySecret","APIParameterConfigModel","APIPassword","APIToken","AWSEnvironment","AWSRegion","AWSS3Staging","AWSSellerPartnerAccountType","AccessToken","AccessTokenIsRequiredForAuthenticationRequests","AccountNames","ActionReportTime","ActorTypeEnum","AdAnalyticsReportConfiguration","Aha","Airbyte","AirbyteAPIConnectionSchedule","Aircall","Airtable","Allow","AmazonAds","AmazonSellerPartner","AmazonSqs","Amplitude","AndGroup","ApifyDataset","Appfollow","Applications","Asana","AsanaCredentials","Astra","Auth0","AuthMethod","AuthType","AuthenticateViaAPIKey","AuthenticateViaAccessKeys","AuthenticateViaAsanaOauth","AuthenticateViaFacebookMarketingOauth","AuthenticateViaGoogleOauth","AuthenticateViaHarvestOAuth","AuthenticateViaLeverAPIKey","AuthenticateViaLeverOAuth","AuthenticateViaMicrosoft","AuthenticateViaMicrosoftOAuth","AuthenticateViaMicrosoftOAuth20","AuthenticateViaOAuth","AuthenticateViaOAuth20","AuthenticateViaOauth2","AuthenticateViaPassword","AuthenticateViaPrivateKey","AuthenticateViaRetentlyOAuth","AuthenticateViaStorageAccountKey","AuthenticateWithAPIToken","AuthenticateWithPersonalAccessToken","Authentication","AuthenticationMechanism","AuthenticationMethod","AuthenticationMode","AuthenticationType","AuthenticationViaGoogleOAuth","AuthenticationWildcard","Authorization","AuthorizationMethod","AuthorizationType","Autogenerated","Avro","AvroApacheAvro","AvroFormat","AwsCloudtrail","AwsDatalake","AzBlobAzureBlobStorage","AzureBlobStorage","AzureBlobStorageCredentials","AzureOpenAI","AzureTable","BambooHr","BaseURL","Basecamp","BatchedStandardInserts","BetweenFilter","Bigquery","BingAds","BothUsernameAndPasswordIsRequiredForAuthenticationRequest","Braintree","Braze","BreezyHr","ByMarkdownHeader","ByProgrammingLanguage","BySeparator","Bzip2","CSVCommaSeparatedValues","CSVFormat","CSVHeaderDefinition","CacheType","Calendly","CaptureModeAdvanced","Cart","Categories","CentralAPIRouter","Chargebee","Chartmogul","ChooseHowToPartitionData","Clazar","ClickWindowDays","Clickhouse","ClickupAPI","Clockify","CloseCom","ClusterType","Coda","Codec","Cohere","CohortReportSettings","CohortReports","Cohorts","CohortsRange","CoinAPI","Coinmarketcap","Collection","Compression","CompressionCodec","CompressionCodecOptional","Compre
ssionType","Configcat","Confluence","ConnectBy","ConnectionCreateRequest","ConnectionPatchRequest","ConnectionResponse","ConnectionScheduleResponse","ConnectionStatusEnum","ConnectionSyncModeEnum","ConnectionType","ConnectionsResponse","ContentType","ContinuousFeed","ConversionReportTime","Convex","Country","Credential","CredentialType","Credentials","CredentialsTitle","Csv","CustomQueriesArray","CustomReportConfig","CustomerIo","CustomerStatus","DataCenterLocation","DataFreshness","DataRegion","DataSource","DataType","Databricks","Datadog","Datascope","DatasetLocation","DateRange","Dbt","DefaultVectorizer","Deflate","DeletionMode","Delighted","DestinationAstra","DestinationAstraLanguage","DestinationAstraMode","DestinationAstraSchemasEmbeddingEmbedding1Mode","DestinationAstraSchemasEmbeddingEmbeddingMode","DestinationAstraSchemasEmbeddingMode","DestinationAstraSchemasMode","DestinationAstraSchemasProcessingMode","DestinationAstraSchemasProcessingTextSplitterMode","DestinationAstraSchemasProcessingTextSplitterTextSplitterMode","DestinationAwsDatalake","DestinationAwsDatalakeCompressionCodecOptional","DestinationAwsDatalakeCredentialsTitle","DestinationAwsDatalakeFormatTypeWildcard","DestinationAzureBlobStorage","DestinationAzureBlobStorageAzureBlobStorage","DestinationAzureBlobStorageFormatType","DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON","DestinationBigquery","DestinationBigqueryCredentialType","DestinationBigqueryHMACKey","DestinationBigqueryMethod","DestinationClickhouse","DestinationClickhouseSchemasTunnelMethod","DestinationClickhouseTunnelMethod","DestinationConfiguration","DestinationConvex","DestinationCreateRequest","DestinationDatabricks","DestinationDatabricksAuthType","DestinationDatabricksSchemasAuthType","DestinationDevNull","DestinationDuckdb","DestinationDynamodb","DestinationElasticsearch","DestinationElasticsearchMethod","DestinationElasticsearchSchemasMethod","DestinationFirebolt","DestinationFireboltLoadingMethod","DestinationFireboltMethod","DestinationFireboltSchemasMethod","DestinationFirestore","DestinationGcs","DestinationGcsAuthentication","DestinationGcsCSVCommaSeparatedValues","DestinationGcsCodec","DestinationGcsCompression","DestinationGcsCompressionCodec","DestinationGcsCompressionType","DestinationGcsFormatType","DestinationGcsGZIP","DestinationGcsJSONLinesNewlineDelimitedJSON","DestinationGcsNoCompression","DestinationGcsOutputFormat","DestinationGcsParquetColumnarStorage","DestinationGcsSchemasCodec","DestinationGcsSchemasCompressionType","DestinationGcsSchemasFormatCodec","DestinationGcsSchemasFormatCompressionType","DestinationGcsSchemasFormatFormatType","DestinationGcsSchemasFormatOutputFormat1Codec","DestinationGcsSchemasFormatOutputFormatCodec","DestinationGcsSchemasFormatOutputFormatFormatType","DestinationGcsSchemasFormatType","DestinationGcsSchemasNoCompression","DestinationGoogleSheets","DestinationGoogleSheetsGoogleSheets","DestinationMilvus","DestinationMilvusAPIToken","DestinationMilvusAuthentication","DestinationMilvusAzureOpenAI","DestinationMilvusByMarkdownHeader","DestinationMilvusByProgrammingLanguage","DestinationMilvusBySeparator","DestinationMilvusCohere","DestinationMilvusEmbedding","DestinationMilvusFake","DestinationMilvusFieldNameMappingConfigModel","DestinationMilvusIndexing","DestinationMilvusLanguage","DestinationMilvusMode","DestinationMilvusOpenAI","DestinationMilvusOpenAICompatible","DestinationMilvusProcessingConfigModel","DestinationMilvusSchemasEmbeddingEmbedding5Mode","DestinationMilvusSchemasEmbeddingEmbeddi
ngMode","DestinationMilvusSchemasEmbeddingMode","DestinationMilvusSchemasIndexingAuthAuthenticationMode","DestinationMilvusSchemasIndexingAuthMode","DestinationMilvusSchemasIndexingMode","DestinationMilvusSchemasMode","DestinationMilvusSchemasProcessingMode","DestinationMilvusSchemasProcessingTextSplitterMode","DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode","DestinationMilvusTextSplitter","DestinationMilvusUsernamePassword","DestinationMongodb","DestinationMongodbAuthorization","DestinationMongodbInstance","DestinationMongodbNoTunnel","DestinationMongodbPasswordAuthentication","DestinationMongodbSSHKeyAuthentication","DestinationMongodbSSHTunnelMethod","DestinationMongodbSchemasAuthorization","DestinationMongodbSchemasInstance","DestinationMongodbSchemasTunnelMethod","DestinationMongodbSchemasTunnelMethodTunnelMethod","DestinationMongodbTunnelMethod","DestinationMssql","DestinationMssqlNoTunnel","DestinationMssqlPasswordAuthentication","DestinationMssqlSSHKeyAuthentication","DestinationMssqlSSHTunnelMethod","DestinationMssqlSchemasSslMethod","DestinationMssqlSchemasTunnelMethod","DestinationMssqlSchemasTunnelMethodTunnelMethod","DestinationMssqlSslMethod","DestinationMssqlTunnelMethod","DestinationMysql","DestinationMysqlNoTunnel","DestinationMysqlPasswordAuthentication","DestinationMysqlSSHKeyAuthentication","DestinationMysqlSSHTunnelMethod","DestinationMysqlSchemasTunnelMethod","DestinationMysqlSchemasTunnelMethodTunnelMethod","DestinationMysqlTunnelMethod","DestinationOracle","DestinationOracleNoTunnel","DestinationOraclePasswordAuthentication","DestinationOracleSSHKeyAuthentication","DestinationOracleSSHTunnelMethod","DestinationOracleSchemasTunnelMethod","DestinationOracleSchemasTunnelMethodTunnelMethod","DestinationOracleTunnelMethod","DestinationPatchRequest","DestinationPinecone","DestinationPineconeAzureOpenAI","DestinationPineconeByMarkdownHeader","DestinationPineconeByProgrammingLanguage","DestinationPineconeBySeparator","DestinationPineconeCohere","DestinationPineconeEmbedding","DestinationPineconeFake","DestinationPineconeFieldNameMappingConfigModel","DestinationPineconeIndexing","DestinationPineconeLanguage","DestinationPineconeMode","DestinationPineconeOpenAI","DestinationPineconeOpenAICompatible","DestinationPineconeProcessingConfigModel","DestinationPineconeSchemasEmbeddingEmbedding5Mode","DestinationPineconeSchemasEmbeddingEmbeddingMode","DestinationPineconeSchemasEmbeddingMode","DestinationPineconeSchemasMode","DestinationPineconeSchemasProcessingMode","DestinationPineconeSchemasProcessingTextSplitterMode","DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode","DestinationPineconeTextSplitter","DestinationPostgres","DestinationPostgresMode","DestinationPostgresNoTunnel","DestinationPostgresPasswordAuthentication","DestinationPostgresSSHKeyAuthentication","DestinationPostgresSSHTunnelMethod","DestinationPostgresSchemasMode","DestinationPostgresSchemasSSLModeSSLModes6Mode","DestinationPostgresSchemasSSLModeSSLModesMode","DestinationPostgresSchemasSslModeMode","DestinationPostgresSchemasTunnelMethod","DestinationPostgresSchemasTunnelMethodTunnelMethod","DestinationPostgresTunnelMethod","DestinationPubsub","DestinationPutRequest","DestinationQdrant","DestinationQdrantAuthenticationMethod","DestinationQdrantAzureOpenAI","DestinationQdrantByMarkdownHeader","DestinationQdrantByProgrammingLanguage","DestinationQdrantBySeparator","DestinationQdrantCohere","DestinationQdrantEmbedding","DestinationQdrantFake","DestinationQdrantFieldNameMappingConfigModel
","DestinationQdrantIndexing","DestinationQdrantLanguage","DestinationQdrantMode","DestinationQdrantNoAuth","DestinationQdrantOpenAI","DestinationQdrantOpenAICompatible","DestinationQdrantProcessingConfigModel","DestinationQdrantSchemasEmbeddingEmbedding5Mode","DestinationQdrantSchemasEmbeddingEmbeddingMode","DestinationQdrantSchemasEmbeddingMode","DestinationQdrantSchemasIndexingAuthMethodMode","DestinationQdrantSchemasIndexingMode","DestinationQdrantSchemasMode","DestinationQdrantSchemasProcessingMode","DestinationQdrantSchemasProcessingTextSplitterMode","DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode","DestinationQdrantTextSplitter","DestinationRedis","DestinationRedisDisable","DestinationRedisMode","DestinationRedisNoTunnel","DestinationRedisPasswordAuthentication","DestinationRedisSSHKeyAuthentication","DestinationRedisSSHTunnelMethod","DestinationRedisSSLModes","DestinationRedisSchemasMode","DestinationRedisSchemasTunnelMethod","DestinationRedisSchemasTunnelMethodTunnelMethod","DestinationRedisTunnelMethod","DestinationRedisVerifyFull","DestinationRedshift","DestinationRedshiftMethod","DestinationRedshiftNoTunnel","DestinationRedshiftPasswordAuthentication","DestinationRedshiftS3BucketRegion","DestinationRedshiftSSHKeyAuthentication","DestinationRedshiftSSHTunnelMethod","DestinationRedshiftSchemasTunnelMethod","DestinationRedshiftSchemasTunnelMethodTunnelMethod","DestinationRedshiftTunnelMethod","DestinationResponse","DestinationS3","DestinationS3AvroApacheAvro","DestinationS3Bzip2","DestinationS3CSVCommaSeparatedValues","DestinationS3Codec","DestinationS3Compression","DestinationS3CompressionCodec","DestinationS3CompressionType","DestinationS3Deflate","DestinationS3Flattening","DestinationS3FormatType","DestinationS3GZIP","DestinationS3Glue","DestinationS3GlueCompression","DestinationS3GlueCompressionType","DestinationS3GlueFormatType","DestinationS3GlueGZIP","DestinationS3GlueJSONLinesNewlineDelimitedJSON","DestinationS3GlueNoCompression","DestinationS3GlueOutputFormat","DestinationS3GlueS3BucketRegion","DestinationS3GlueSchemasCompressionType","DestinationS3JSONLinesNewlineDelimitedJSON","DestinationS3NoCompression","DestinationS3OutputFormat","DestinationS3ParquetColumnarStorage","DestinationS3S3BucketRegion","DestinationS3SchemasCodec","DestinationS3SchemasCompression","DestinationS3SchemasCompressionCodec","DestinationS3SchemasCompressionType","DestinationS3SchemasFlattening","DestinationS3SchemasFormatCodec","DestinationS3SchemasFormatCompressionType","DestinationS3SchemasFormatFormatType","DestinationS3SchemasFormatNoCompression","DestinationS3SchemasFormatOutputFormat3Codec","DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec","DestinationS3SchemasFormatOutputFormatCodec","DestinationS3SchemasFormatOutputFormatCompressionType","DestinationS3SchemasFormatOutputFormatFormatType","DestinationS3SchemasFormatType","DestinationS3SchemasGZIP","DestinationS3SchemasNoCompression","DestinationS3Snappy","DestinationS3Xz","DestinationS3Zstandard","DestinationSftpJSON","DestinationSnowflake","DestinationSnowflakeAuthType","DestinationSnowflakeCortex","DestinationSnowflakeCortexAzureOpenAI","DestinationSnowflakeCortexByMarkdownHeader","DestinationSnowflakeCortexByProgrammingLanguage","DestinationSnowflakeCortexBySeparator","DestinationSnowflakeCortexCohere","DestinationSnowflakeCortexCredentials","DestinationSnowflakeCortexEmbedding","DestinationSnowflakeCortexFake","DestinationSnowflakeCortexFieldNameMappingConfigModel","DestinationSnowflakeCortexLanguage","Desti
nationSnowflakeCortexMode","DestinationSnowflakeCortexOpenAI","DestinationSnowflakeCortexOpenAICompatible","DestinationSnowflakeCortexProcessingConfigModel","DestinationSnowflakeCortexSchemasEmbeddingEmbedding5Mode","DestinationSnowflakeCortexSchemasEmbeddingEmbeddingMode","DestinationSnowflakeCortexSchemasEmbeddingMode","DestinationSnowflakeCortexSchemasMode","DestinationSnowflakeCortexSchemasProcessingMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterTextSplitterMode","DestinationSnowflakeCortexTextSplitter","DestinationSnowflakeOAuth20","DestinationSnowflakeSchemasAuthType","DestinationSnowflakeSchemasCredentialsAuthType","DestinationSnowflakeSnowflake","DestinationTeradata","DestinationTeradataAllow","DestinationTeradataDisable","DestinationTeradataMode","DestinationTeradataPrefer","DestinationTeradataRequire","DestinationTeradataSSLModes","DestinationTeradataSchemasMode","DestinationTeradataSchemasSSLModeSSLModes5Mode","DestinationTeradataSchemasSSLModeSSLModes6Mode","DestinationTeradataSchemasSSLModeSSLModesMode","DestinationTeradataSchemasSslModeMode","DestinationTeradataVerifyCa","DestinationTeradataVerifyFull","DestinationTimeplus","DestinationTypesense","DestinationVectara","DestinationWeaviate","DestinationWeaviateAPIToken","DestinationWeaviateAuthentication","DestinationWeaviateAzureOpenAI","DestinationWeaviateByMarkdownHeader","DestinationWeaviateByProgrammingLanguage","DestinationWeaviateBySeparator","DestinationWeaviateCohere","DestinationWeaviateEmbedding","DestinationWeaviateFake","DestinationWeaviateFieldNameMappingConfigModel","DestinationWeaviateIndexing","DestinationWeaviateLanguage","DestinationWeaviateMode","DestinationWeaviateOpenAI","DestinationWeaviateOpenAICompatible","DestinationWeaviateProcessingConfigModel","DestinationWeaviateSchemasEmbeddingEmbedding5Mode","DestinationWeaviateSchemasEmbeddingEmbedding6Mode","DestinationWeaviateSchemasEmbeddingEmbedding7Mode","DestinationWeaviateSchemasEmbeddingEmbeddingMode","DestinationWeaviateSchemasEmbeddingMode","DestinationWeaviateSchemasIndexingAuthAuthenticationMode","DestinationWeaviateSchemasIndexingAuthMode","DestinationWeaviateSchemasIndexingMode","DestinationWeaviateSchemasMode","DestinationWeaviateSchemasProcessingMode","DestinationWeaviateSchemasProcessingTextSplitterMode","DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode","DestinationWeaviateTextSplitter","DestinationWeaviateUsernamePassword","DestinationYellowbrick","DestinationYellowbrickAllow","DestinationYellowbrickDisable","DestinationYellowbrickMode","DestinationYellowbrickNoTunnel","DestinationYellowbrickPasswordAuthentication","DestinationYellowbrickPrefer","DestinationYellowbrickRequire","DestinationYellowbrickSSHKeyAuthentication","DestinationYellowbrickSSHTunnelMethod","DestinationYellowbrickSSLModes","DestinationYellowbrickSchemasMode","DestinationYellowbrickSchemasSSLModeSSLModes5Mode","DestinationYellowbrickSchemasSSLModeSSLModes6Mode","DestinationYellowbrickSchemasSSLModeSSLModesMode","DestinationYellowbrickSchemasSslModeMode","DestinationYellowbrickSchemasTunnelMethod","DestinationYellowbrickSchemasTunnelMethodTunnelMethod","DestinationYellowbrickTunnelMethod","DestinationYellowbrickVerifyCa","DestinationYellowbrickVerifyFull","DestinationsResponse","DetailType","DetectChangesWithXminSystemColumn","DevNull","Dimension","DimensionsFilter","Disable","Disabled","DistanceMetric","Dixa","Dockerhub","DocumentFileTypeFormatExperimental","DomainRegionCode","DoubleV
alue","Dremio","Duckdb","DynamoDBRegion","Dynamodb","E2eTestCloud","EUBasedAccount","Elasticsearch","Emailoctopus","Embedding","Enabled","EncryptedTrustServerCertificate","EncryptedVerifyCertificate","Encryption","EncryptionAlgorithm","EncryptionMethod","EngagementWindowDays","Environment","ExcelFormat","ExchangeRates","Expression","ExternalTableViaS3","FacebookMarketing","FacebookMarketingCredentials","Fake","Faker","Fauna","FieldNameMappingConfigModel","File","FileBasedStreamConfig","FileFormat","Filetype","Filter","FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody","FilterName","FilterType","Firebolt","Firestore","Flattening","Fleetio","Format","FormatType","FormatTypeWildcard","Freshcaller","Freshdesk","Freshsales","FromCSV","FromField","FromValue","GCSBucketRegion","GCSGoogleCloudStorage","GCSStaging","GCSTmpFilesAfterwardProcessing","GainsightPx","Gcs","GeographyEnum","GeographyEnumNoDefault","Getlago","Github","GithubCredentials","Gitlab","GitlabCredentials","Glassfrog","GlobalAccount","Gnews","Goldcast","GoogleAds","GoogleAdsCredentials","GoogleAnalyticsDataAPI","GoogleAnalyticsDataAPICredentials","GoogleCredentials","GoogleDirectory","GoogleDrive","GoogleDriveCredentials","GooglePagespeedInsights","GoogleSearchConsole","GoogleSheets","GoogleSheetsCredentials","GoogleWebfonts","Granularity","GranularityForGeoLocationRegion","GranularityForPeriodicReports","Greenhouse","Gridly","Gzip","HMACKey","HTTPSPublicWeb","HardcodedRecords","Harvest","Header","HeaderDefinitionType","Hibob","HighLevel","Hubplanner","Hubspot","HubspotCredentials","IAMRole","IAMUser","In","InListFilter","Indexing","InferenceType","InitiateOauthRequest","InsightConfig","Insightly","Instagram","Instance","Instatus","Int64Value","Intercom","InvalidCDCPositionBehaviorAdvanced","Ip2whois","IssuesStreamExpandWith","Iterable","JSONLinesNewlineDelimitedJSON","Jira","JobCreateRequest","JobResponse","JobStatusEnum","JobTypeEnum","JobsResponse","Jsonl","JsonlFormat","K6Cloud","KeyPairAuthentication","Klarna","Klaviyo","Kyve","LSNCommitBehaviour","Language","Launchdarkly","Leadfeeder","Lemlist","Level","LeverHiring","LeverHiringCredentials","LinkedinAds","LinkedinAdsCredentials","LinkedinPages","Linnworks","LoadingMethod","Local","LoginPassword","Lokalise","Looker","Luma","Mailchimp","MailchimpCredentials","Mailgun","MailjetSms","Marketo","Metabase","Method","MetricsFilter","MicrosoftOnedrive","MicrosoftOnedriveCredentials","MicrosoftSharepoint","MicrosoftSharepointCredentials","MicrosoftTeams","MicrosoftTeamsCredentials","Milvus","Mixpanel","MockCatalog","Mode","Monday","MondayCredentials","MongoDBAtlas","MongoDBAtlasReplicaSet","MongoDbInstanceType","Mongodb","MongodbV2","Mssql","MultiSchema","MyHours","Mysql","NamespaceDefinitionEnum","NamespaceDefinitionEnumNoDefault","NativeNetworkEncryptionNNE","Netsuite","NoAuth","NoAuthentication","NoCompression","NoExternalEmbedding","NoTunnel","NonBreakingSchemaUpdatesBehaviorEnum","NonBreakingSchemaUpdatesBehaviorEnumNoDefault","NoneT","Normalization","NormalizationFlattening","NorthpassLms","NotExpression","Notion","NotionCredentials","Nullable","NumericFilter","Nytimes","OAuth","OAuth20","OAuth20Credentials","OAuth20WithPrivateKey","OAuth2AccessToken","OAuth2ConfidentialApplication","OAuth2Recommended","OAuthActorNames","OAuthCredentialsConfiguration","OauthAuthentication","Okta","Omnisend","Onesignal","OpenAI","OpenAICompatible","Operator","OptionTitle","OptionsList","OrGroup","Oracle","Orb","Orbit","OrganizationResponse","O
rganizationsResponse","OriginDatacenterOfTheSurveyMonkeyAccount","OutbrainAmplify","OutputFormat","OutputFormatWildcard","Outreach","Parquet","ParquetColumnarStorage","ParquetFormat","ParsingStrategy","PasswordAuthentication","PaypalTransaction","Paystack","Pendo","Pennylane","PeriodUsedForMostPopularStreams","PermissionCreateRequest","PermissionResponse","PermissionResponseRead","PermissionScope","PermissionType","PermissionUpdateRequest","PermissionsResponse","Persistiq","PersonalAccessToken","PexelsAPI","Pinecone","Pinterest","PinterestCredentials","Pipedrive","PivotCategory","Planhat","Plugin","Pocket","Pokeapi","PokemonName","PolygonStockAPI","Postgres","Posthog","Postmarkapp","Prefer","Preferred","Prestashop","PrivateApp","PrivateToken","Processing","ProcessingConfigModel","ProductCatalog","ProjectSecret","PublicPermissionType","Pubsub","Pypi","Qdrant","Qualaroo","Queries","Railz","ReadChangesUsingBinaryLogCDC","ReadChangesUsingChangeDataCaptureCDC","ReadChangesUsingWriteAheadLogCDC","Recharge","Recreation","Recruitee","Recurly","Reddit","Redis","Redshift","Region","ReplicaSet","ReportConfig","ReportName","ReportOptions","ReportRecordTypes","ReportingDataObject","Require","Required","Retently","RetentlyCredentials","RkiCovid","RoleBasedAuthentication","Rss","S3","S3AmazonWebServices","S3BucketRegion","S3Glue","SCPSecureCopyProtocol","SFTPSecureFileTransferProtocol","SQLInserts","SSHKeyAuthentication","SSHSecureShell","SSHTunnelMethod","SSLMethod","SSLModes","Salesforce","Salesloft","SandboxAccessToken","SapFieldglass","Savvycal","ScanChangesWithUserDefinedCursor","ScheduleTypeEnum","ScheduleTypeWithBasicEnum","SchemeBasicAuth","SchemeClientCredentials","Scryfall","SearchCriteria","SearchScope","Secoda","Security","SelectedFieldInfo","SelfManagedReplicaSet","Sendgrid","Sendinblue","Senseforce","Sentry","SerializationLibrary","ServiceAccount","ServiceAccountKey","ServiceAccountKeyAuthentication","ServiceKeyAuthentication","ServiceName","Sftp","SftpBulk","SftpJSON","ShareTypeUsedForMostPopularSharedStream","Shopify","ShopifyAuthorizationMethod","ShopifyCredentials","Shortio","SignInViaGoogleOAuth","SignInViaSlackOAuth","Silent","SingleSchema","SingleStoreAccessToken","Site","Slack","SlackCredentials","Smaily","Smartengage","Smartsheets","SmartsheetsCredentials","SnapchatMarketing","Snappy","Snowflake","SnowflakeConnection","SnowflakeCortex","SnowflakeCredentials","SonarCloud","SortBy","SourceAha","SourceAirbyte","SourceAircall","SourceAirtable","SourceAirtableAirtable","SourceAirtableAuthMethod","SourceAirtableAuthentication","SourceAirtableOAuth20","SourceAirtablePersonalAccessToken","SourceAirtableSchemasAuthMethod","SourceAmazonAds","SourceAmazonAdsAmazonAds","SourceAmazonAdsAuthType","SourceAmazonSellerPartner","SourceAmazonSellerPartnerAmazonSellerPartner","SourceAmazonSellerPartnerAuthType","SourceAmazonSqs","SourceAmazonSqsAWSRegion","SourceAmplitude","SourceApifyDataset","SourceAppfollow","SourceAsana","SourceAsanaAsana","SourceAsanaCredentialsTitle","SourceAsanaSchemasCredentialsTitle","SourceAuth0","SourceAuth0AuthenticationMethod","SourceAuth0SchemasAuthenticationMethod","SourceAuth0SchemasCredentialsAuthenticationMethod","SourceAwsCloudtrail","SourceAzureBlobStorage","SourceAzureBlobStorageAuthType","SourceAzureBlobStorageAuthentication","SourceAzureBlobStorageAzureBlobStorage","SourceAzureBlobStorageFiletype","SourceAzureBlobStorageHeaderDefinitionType","SourceAzureBlobStorageMode","SourceAzureBlobStorageSchemasAuthType","SourceAzureBlobStorageSchemasFiletype","SourceAzureBl
obStorageSchemasHeaderDefinitionType","SourceAzureBlobStorageSchemasStreamsFiletype","SourceAzureBlobStorageSchemasStreamsFormatFiletype","SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype","SourceAzureTable","SourceBambooHr","SourceBasecamp","SourceBigquery","SourceBigqueryBigquery","SourceBingAds","SourceBingAdsBingAds","SourceBraintree","SourceBraintreeEnvironment","SourceBraze","SourceBreezyHr","SourceCalendly","SourceCart","SourceCartAuthType","SourceCartAuthorizationMethod","SourceCartSchemasAuthType","SourceChargebee","SourceChartmogul","SourceClazar","SourceClickhouse","SourceClickhouseClickhouse","SourceClickhouseNoTunnel","SourceClickhousePasswordAuthentication","SourceClickhouseSSHKeyAuthentication","SourceClickhouseSSHTunnelMethod","SourceClickhouseSchemasTunnelMethod","SourceClickhouseSchemasTunnelMethodTunnelMethod","SourceClickhouseTunnelMethod","SourceClickupAPI","SourceClockify","SourceCloseCom","SourceCoda","SourceCoinAPI","SourceCoinmarketcap","SourceConfigcat","SourceConfiguration","SourceConfluence","SourceConvex","SourceConvexConvex","SourceCreateRequest","SourceCustomerIo","SourceDatadog","SourceDatascope","SourceDbt","SourceDelighted","SourceDixa","SourceDockerhub","SourceDremio","SourceDynamodb","SourceDynamodbAuthType","SourceDynamodbCredentials","SourceDynamodbDynamodb","SourceDynamodbDynamodbRegion","SourceDynamodbSchemasAuthType","SourceE2eTestCloud","SourceE2eTestCloudSchemasType","SourceE2eTestCloudType","SourceEmailoctopus","SourceExchangeRates","SourceFacebookMarketing","SourceFacebookMarketingActionReportTime","SourceFacebookMarketingAuthType","SourceFacebookMarketingAuthentication","SourceFacebookMarketingFacebookMarketing","SourceFacebookMarketingSchemasAuthType","SourceFacebookMarketingValidEnums","SourceFaker","SourceFauna","SourceFaunaDeletionMode","SourceFaunaSchemasDeletionMode","SourceFile","SourceFileS3AmazonWebServices","SourceFileSchemasProviderStorage","SourceFileSchemasProviderStorageProvider6Storage","SourceFileSchemasProviderStorageProvider7Storage","SourceFileSchemasProviderStorageProviderStorage","SourceFileSchemasStorage","SourceFileStorage","SourceFirebolt","SourceFireboltFirebolt","SourceFleetio","SourceFreshcaller","SourceFreshdesk","SourceFreshsales","SourceGainsightPx","SourceGcs","SourceGcsAutogenerated","SourceGcsAvroFormat","SourceGcsCSVFormat","SourceGcsCSVHeaderDefinition","SourceGcsFileBasedStreamConfig","SourceGcsFiletype","SourceGcsFormat","SourceGcsFromCSV","SourceGcsGcs","SourceGcsHeaderDefinitionType","SourceGcsInferenceType","SourceGcsJsonlFormat","SourceGcsLocal","SourceGcsMode","SourceGcsParquetFormat","SourceGcsParsingStrategy","SourceGcsProcessing","SourceGcsSchemasFiletype","SourceGcsSchemasHeaderDefinitionType","SourceGcsSchemasMode","SourceGcsSchemasStreamsFiletype","SourceGcsSchemasStreamsFormatFiletype","SourceGcsSchemasStreamsFormatFormat6Filetype","SourceGcsSchemasStreamsFormatFormatFiletype","SourceGcsSchemasStreamsHeaderDefinitionType","SourceGcsUserProvided","SourceGcsValidationPolicy","SourceGetlago","SourceGithub","SourceGithubAuthentication","SourceGithubGithub","SourceGithubOptionTitle","SourceGithubPersonalAccessToken","SourceGitlab","SourceGitlabAuthType","SourceGitlabAuthorizationMethod","SourceGitlabGitlab","SourceGitlabOAuth20","SourceGitlabSchemasAuthType","SourceGlassfrog","SourceGnews","SourceGoldcast","SourceGoogleAds","SourceGoogleAdsGoogleAds","SourceGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIAndGroup","SourceGoogleAnalyticsDataAPIAuthType","SourceGoogleAnalyticsDataAPIBetweenF
ilter","SourceGoogleAnalyticsDataAPICredentials","SourceGoogleAnalyticsDataAPICustomReportConfig","SourceGoogleAnalyticsDataAPIDisabled","SourceGoogleAnalyticsDataAPIDoubleValue","SourceGoogleAnalyticsDataAPIEnabled","SourceGoogleAnalyticsDataAPIExpression","SourceGoogleAnalyticsDataAPIFilter","SourceGoogleAnalyticsDataAPIFilterName","SourceGoogleAnalyticsDataAPIFilterType","SourceGoogleAnalyticsDataAPIFromValue","SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIGranularity","SourceGoogleAnalyticsDataAPIInListFilter","SourceGoogleAnalyticsDataAPIInt64Value","SourceGoogleAnalyticsDataAPINotExpression","SourceGoogleAnalyticsDataAPINumericFilter","SourceGoogleAnalyticsDataAPIOrGroup","SourceGoogleAnalyticsDataAPISchemasAuthType","SourceGoogleAnalyticsDataAPISchemasBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemas
CustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter","SourceGoogleAnalyticsDa
taAPISchemasCustomReportsArrayDimensionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayEnabled","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomR
eportsArrayMetricFilterMetricsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCu
stomReportsArrayMetricFilterMetricsFilter3Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValu
eType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType","SourceGoogleAnalyticsDataAPISchemasDoubleValue","SourceGoogleAnalyticsDataAPISchemasEnabled","SourceGoogleAnalyticsDataAPISchemasExpression","SourceGoogleAnalyticsDataAPISchemasFilter","SourceGoogleAnalyticsDataAPISchemasFilterName","SourceGoogleAnalyticsDataAPISchemasFilterType","SourceGoogleAnalyticsDataAPISchemasFromValue","SourceGoogleAnalyticsDataAPISchemasInListFilter","SourceGoogleAnalyticsDataAPISchemasInt64Value","SourceGoogleAnalyticsDataAPISchemasNumericFilter","SourceGoogleAnalyticsDataAPISchemasStringFilter","SourceGoogleAnalyticsDataAPISchemasToValue","SourceGoogleAnalyticsDataAPISchemasValidEnums","SourceGoogleAnalyticsDataAPISchemasValue","SourceGoogleAnalyticsDataAPISchemasValueType","SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication","SourceGoogleAnalyticsDataAPIStringFilter","SourceGoogleAnalyticsDataAPIToValue","SourceGoogleAnalyticsDataAPIValidEnums","SourceGoogleAnalyticsDataAPIValue","SourceGoogleAnalyticsDataAPIValueType","SourceGoogleDirectory","SourceGoogleDirectoryCredentialsTitle","SourceGoogleDirectoryGoogleCredentials","SourceGoogleDirectorySchemasCredentialsTitle","SourceGoogleDrive","SourceGoogleDriveAuthType","SourceGoogleDriveAuthenticateViaGoogleOAuth","SourceGoogleDriveAuthentication","SourceGoogleDriveAutogenerated","SourceGoogleDriveAvroFormat","SourceGoogleDriveCSVFormat","SourceGoogleDriveCSVHeaderDefinition","SourceGoogleDriveDocumentFileTypeFormatExperimental","SourceGoogleDriveFileBasedStreamConfig","SourceGoogleDriveFiletype","SourceGoogleDriveFormat","SourceGoogleDriveFromCSV","SourceGoogleDriveGoogleDrive","SourceGoogleDriveHeaderDefinitionType","SourceGoogleDriveJsonlFormat","SourceGoogleDriveLocal","SourceGoogleDriveMode","SourceGoogleDriveParquetFormat","SourceGoogleDriveParsingStrategy","SourceGoogleDriveProcessing","SourceGoogleDriveSchemasAuthType","SourceGoogleDriveSchemasFiletype","SourceGoogleDriveSchemasHeaderDefinitionType","SourceGoogleDriveSchemasStreamsFiletype","SourceGoogleDriveSchemasStreamsFormatFiletype","SourceGoogleDriveSchemasStreamsFormatFormatFiletype","SourceGoogleDriveSchemasStreamsHeaderDefinitionType","SourceGoogleDriveServiceAccountKeyAuthentication","SourceGoogleDriveUserProvided","SourceGoogleDriveValidationPolicy","SourceGooglePagespeedInsights","SourceGoogleSearchConsole","SourceGoogleSearchConsoleAuthType","SourceGoogleSearchConsoleCustomReportConfig","SourceGoogleSearchConsoleGoogleSearchConsole","SourceGoogleSearchConsoleOAuth","SourceGoogleSearchConsoleSchemasAuthType","SourceGoogleSearchConsoleServiceAccountKeyAuthentication","SourceGoogleSearchConsoleValidEnums","SourceGoogleSheets","SourceGoogleSheetsAuthType","SourceGoogleSheetsAuthenticateViaGoogleOAuth","SourceGoogleSheetsAuthentication","SourceGoogleSheetsGoogleSheets","SourceGoogleSheetsSchemasAuthType","SourceGoogleSheetsServiceAccountKeyAuthentication","SourceGoogleWebfonts","SourceGreenhouse","SourceGridly","SourceHardcodedRecords","SourceHarvest","SourceHarvestAuthType","SourceHarvestAuthenticateWithPersonalAccessToken","SourceHarvestAuthenticationMechanism","SourceHarvestSchemasAuthType","SourceHibob","SourceHighLevel","SourceHubplanner","Sourc
eHubspot","SourceHubspotAuthType","SourceHubspotAuthentication","SourceHubspotHubspot","SourceHubspotOAuth","SourceHubspotSchemasAuthType","SourceInsightly","SourceInstagram","SourceInstagramInstagram","SourceInstatus","SourceIntercom","SourceIntercomIntercom","SourceIp2whois","SourceIterable","SourceJira","SourceK6Cloud","SourceKlarna","SourceKlarnaRegion","SourceKlaviyo","SourceKyve","SourceLaunchdarkly","SourceLeadfeeder","SourceLemlist","SourceLeverHiring","SourceLeverHiringAuthType","SourceLeverHiringAuthenticationMechanism","SourceLeverHiringEnvironment","SourceLeverHiringLeverHiring","SourceLeverHiringSchemasAuthType","SourceLinkedinAds","SourceLinkedinAdsAuthMethod","SourceLinkedinAdsAuthentication","SourceLinkedinAdsLinkedinAds","SourceLinkedinAdsOAuth20","SourceLinkedinAdsSchemasAuthMethod","SourceLinkedinPages","SourceLinkedinPagesAccessToken","SourceLinkedinPagesAuthMethod","SourceLinkedinPagesAuthentication","SourceLinkedinPagesOAuth20","SourceLinkedinPagesSchemasAuthMethod","SourceLinnworks","SourceLokalise","SourceLooker","SourceLuma","SourceMailchimp","SourceMailchimpAuthType","SourceMailchimpAuthentication","SourceMailchimpMailchimp","SourceMailchimpOAuth20","SourceMailchimpSchemasAuthType","SourceMailgun","SourceMailjetSms","SourceMarketo","SourceMetabase","SourceMicrosoftOnedrive","SourceMicrosoftOnedriveAuthType","SourceMicrosoftOnedriveAuthentication","SourceMicrosoftOnedriveAutogenerated","SourceMicrosoftOnedriveAvroFormat","SourceMicrosoftOnedriveCSVFormat","SourceMicrosoftOnedriveCSVHeaderDefinition","SourceMicrosoftOnedriveFileBasedStreamConfig","SourceMicrosoftOnedriveFiletype","SourceMicrosoftOnedriveFormat","SourceMicrosoftOnedriveFromCSV","SourceMicrosoftOnedriveHeaderDefinitionType","SourceMicrosoftOnedriveJsonlFormat","SourceMicrosoftOnedriveLocal","SourceMicrosoftOnedriveMicrosoftOnedrive","SourceMicrosoftOnedriveMode","SourceMicrosoftOnedriveParquetFormat","SourceMicrosoftOnedriveParsingStrategy","SourceMicrosoftOnedriveProcessing","SourceMicrosoftOnedriveSchemasAuthType","SourceMicrosoftOnedriveSchemasFiletype","SourceMicrosoftOnedriveSchemasHeaderDefinitionType","SourceMicrosoftOnedriveSchemasStreamsFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsHeaderDefinitionType","SourceMicrosoftOnedriveUnstructuredDocumentFormat","SourceMicrosoftOnedriveUserProvided","SourceMicrosoftOnedriveValidationPolicy","SourceMicrosoftSharepoint","SourceMicrosoftSharepointAuthType","SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth","SourceMicrosoftSharepointAuthentication","SourceMicrosoftSharepointAutogenerated","SourceMicrosoftSharepointAvroFormat","SourceMicrosoftSharepointCSVFormat","SourceMicrosoftSharepointCSVHeaderDefinition","SourceMicrosoftSharepointExcelFormat","SourceMicrosoftSharepointFileBasedStreamConfig","SourceMicrosoftSharepointFiletype","SourceMicrosoftSharepointFormat","SourceMicrosoftSharepointFromCSV","SourceMicrosoftSharepointHeaderDefinitionType","SourceMicrosoftSharepointJsonlFormat","SourceMicrosoftSharepointLocal","SourceMicrosoftSharepointMicrosoftSharepoint","SourceMicrosoftSharepointMode","SourceMicrosoftSharepointParquetFormat","SourceMicrosoftSharepointParsingStrategy","SourceMicrosoftSharepointProcessing","SourceMicrosoftSharepointSchemasAuthType","SourceMicrosoftSharepointSchemasFiletype","SourceMicrosoftSharepointSchemasHeaderDefinitionType","SourceMicrosoftSharepointSchemasStreamsFiletype","SourceMicrosoftSharepointSchemasStr
eamsFormatFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFormat6Filetype","SourceMicrosoftSharepointSchemasStreamsFormatFormatFiletype","SourceMicrosoftSharepointSchemasStreamsHeaderDefinitionType","SourceMicrosoftSharepointSearchScope","SourceMicrosoftSharepointServiceKeyAuthentication","SourceMicrosoftSharepointUnstructuredDocumentFormat","SourceMicrosoftSharepointUserProvided","SourceMicrosoftSharepointValidationPolicy","SourceMicrosoftTeams","SourceMicrosoftTeamsAuthType","SourceMicrosoftTeamsAuthenticationMechanism","SourceMicrosoftTeamsMicrosoftTeams","SourceMicrosoftTeamsSchemasAuthType","SourceMixpanel","SourceMixpanelOptionTitle","SourceMixpanelRegion","SourceMixpanelSchemasOptionTitle","SourceMonday","SourceMondayAuthType","SourceMondayAuthorizationMethod","SourceMondayMonday","SourceMondayOAuth20","SourceMondaySchemasAuthType","SourceMongodbV2","SourceMongodbV2ClusterType","SourceMongodbV2SchemasClusterType","SourceMssql","SourceMssqlEncryptedTrustServerCertificate","SourceMssqlEncryptedVerifyCertificate","SourceMssqlInvalidCDCPositionBehaviorAdvanced","SourceMssqlMethod","SourceMssqlMssql","SourceMssqlNoTunnel","SourceMssqlPasswordAuthentication","SourceMssqlSSHKeyAuthentication","SourceMssqlSSHTunnelMethod","SourceMssqlSSLMethod","SourceMssqlSchemasMethod","SourceMssqlSchemasSSLMethodSSLMethodSSLMethod","SourceMssqlSchemasSslMethod","SourceMssqlSchemasSslMethodSslMethod","SourceMssqlSchemasTunnelMethod","SourceMssqlSchemasTunnelMethodTunnelMethod","SourceMssqlTunnelMethod","SourceMyHours","SourceMysql","SourceMysqlInvalidCDCPositionBehaviorAdvanced","SourceMysqlMethod","SourceMysqlMode","SourceMysqlMysql","SourceMysqlNoTunnel","SourceMysqlPasswordAuthentication","SourceMysqlSSHKeyAuthentication","SourceMysqlSSHTunnelMethod","SourceMysqlSSLModes","SourceMysqlScanChangesWithUserDefinedCursor","SourceMysqlSchemasMethod","SourceMysqlSchemasMode","SourceMysqlSchemasSSLModeSSLModesMode","SourceMysqlSchemasSslModeMode","SourceMysqlSchemasTunnelMethod","SourceMysqlSchemasTunnelMethodTunnelMethod","SourceMysqlTunnelMethod","SourceMysqlUpdateMethod","SourceMysqlVerifyCA","SourceNetsuite","SourceNorthpassLms","SourceNotion","SourceNotionAccessToken","SourceNotionAuthType","SourceNotionAuthenticationMethod","SourceNotionNotion","SourceNotionOAuth20","SourceNotionSchemasAuthType","SourceNytimes","SourceOkta","SourceOktaAPIToken","SourceOktaAuthType","SourceOktaAuthorizationMethod","SourceOktaOAuth20","SourceOktaSchemasAuthType","SourceOktaSchemasCredentialsAuthType","SourceOmnisend","SourceOnesignal","SourceOracle","SourceOracleConnectionType","SourceOracleEncryptionMethod","SourceOracleNoTunnel","SourceOracleOracle","SourceOraclePasswordAuthentication","SourceOracleSSHKeyAuthentication","SourceOracleSSHTunnelMethod","SourceOracleSchemasTunnelMethod","SourceOracleSchemasTunnelMethodTunnelMethod","SourceOracleTunnelMethod","SourceOrb","SourceOrbit","SourceOutbrainAmplify","SourceOutbrainAmplifyAccessToken","SourceOutbrainAmplifyAuthenticationMethod","SourceOutbrainAmplifyUsernamePassword","SourceOutreach","SourcePatchRequest","SourcePaypalTransaction","SourcePaystack","SourcePendo","SourcePennylane","SourcePersistiq","SourcePexelsAPI","SourcePinterest","SourcePinterestAuthMethod","SourcePinterestLevel","SourcePinterestPinterest","SourcePinterestSchemasValidEnums","SourcePinterestValidEnums","SourcePipedrive","SourcePlanhat","SourcePocket","SourcePocketSortBy","SourcePokeapi","SourcePolygonStockAPI","SourcePostgres","SourcePostgresAllow","SourcePostgresDisable","SourcePostgresInvalid
CDCPositionBehaviorAdvanced","SourcePostgresMethod","SourcePostgresMode","SourcePostgresNoTunnel","SourcePostgresPasswordAuthentication","SourcePostgresPostgres","SourcePostgresPrefer","SourcePostgresRequire","SourcePostgresSSHKeyAuthentication","SourcePostgresSSHTunnelMethod","SourcePostgresSSLModes","SourcePostgresScanChangesWithUserDefinedCursor","SourcePostgresSchemasMethod","SourcePostgresSchemasMode","SourcePostgresSchemasReplicationMethodMethod","SourcePostgresSchemasSSLModeSSLModes5Mode","SourcePostgresSchemasSSLModeSSLModes6Mode","SourcePostgresSchemasSSLModeSSLModesMode","SourcePostgresSchemasSslModeMode","SourcePostgresSchemasTunnelMethod","SourcePostgresSchemasTunnelMethodTunnelMethod","SourcePostgresTunnelMethod","SourcePostgresUpdateMethod","SourcePostgresVerifyCa","SourcePostgresVerifyFull","SourcePosthog","SourcePostmarkapp","SourcePrestashop","SourcePutRequest","SourcePypi","SourceQualaroo","SourceRailz","SourceRecharge","SourceRecreation","SourceRecruitee","SourceRecurly","SourceReddit","SourceRedshift","SourceRedshiftRedshift","SourceResponse","SourceRetently","SourceRetentlyAuthType","SourceRetentlyAuthenticationMechanism","SourceRetentlyRetently","SourceRetentlySchemasAuthType","SourceRkiCovid","SourceRss","SourceS3","SourceS3Autogenerated","SourceS3AvroFormat","SourceS3CSVFormat","SourceS3CSVHeaderDefinition","SourceS3FileBasedStreamConfig","SourceS3FileFormat","SourceS3Filetype","SourceS3Format","SourceS3FromCSV","SourceS3HeaderDefinitionType","SourceS3InferenceType","SourceS3JsonlFormat","SourceS3Local","SourceS3Mode","SourceS3ParquetFormat","SourceS3ParsingStrategy","SourceS3Processing","SourceS3S3","SourceS3SchemasFiletype","SourceS3SchemasFormatFiletype","SourceS3SchemasHeaderDefinitionType","SourceS3SchemasStreamsFiletype","SourceS3SchemasStreamsFormatFiletype","SourceS3SchemasStreamsFormatFormat4Filetype","SourceS3SchemasStreamsFormatFormat5Filetype","SourceS3SchemasStreamsFormatFormatFiletype","SourceS3SchemasStreamsHeaderDefinitionType","SourceS3UnstructuredDocumentFormat","SourceS3UserProvided","SourceS3ValidationPolicy","SourceSalesforce","SourceSalesforceSalesforce","SourceSalesloft","SourceSalesloftAuthType","SourceSalesloftCredentials","SourceSalesloftSchemasAuthType","SourceSapFieldglass","SourceSavvycal","SourceScryfall","SourceSecoda","SourceSendgrid","SourceSendinblue","SourceSenseforce","SourceSentry","SourceSftp","SourceSftpAuthMethod","SourceSftpAuthentication","SourceSftpBulk","SourceSftpBulkAPIParameterConfigModel","SourceSftpBulkAuthType","SourceSftpBulkAuthentication","SourceSftpBulkAutogenerated","SourceSftpBulkAvroFormat","SourceSftpBulkCSVFormat","SourceSftpBulkCSVHeaderDefinition","SourceSftpBulkExcelFormat","SourceSftpBulkFileBasedStreamConfig","SourceSftpBulkFiletype","SourceSftpBulkFormat","SourceSftpBulkFromCSV","SourceSftpBulkHeaderDefinitionType","SourceSftpBulkInferenceType","SourceSftpBulkJsonlFormat","SourceSftpBulkLocal","SourceSftpBulkMode","SourceSftpBulkParquetFormat","SourceSftpBulkParsingStrategy","SourceSftpBulkProcessing","SourceSftpBulkSchemasAuthType","SourceSftpBulkSchemasFiletype","SourceSftpBulkSchemasHeaderDefinitionType","SourceSftpBulkSchemasMode","SourceSftpBulkSchemasStreamsFiletype","SourceSftpBulkSchemasStreamsFormatFiletype","SourceSftpBulkSchemasStreamsFormatFormat6Filetype","SourceSftpBulkSchemasStreamsFormatFormatFiletype","SourceSftpBulkSchemasStreamsHeaderDefinitionType","SourceSftpBulkUnstructuredDocumentFormat","SourceSftpBulkUserProvided","SourceSftpBulkValidationPolicy","SourceSftpBulkViaAPI","SourceSf
tpPasswordAuthentication","SourceSftpSSHKeyAuthentication","SourceSftpSchemasAuthMethod","SourceShopify","SourceShopifyAuthMethod","SourceShopifyOAuth20","SourceShopifySchemasAuthMethod","SourceShopifyShopify","SourceShortio","SourceSlack","SourceSlackAPIToken","SourceSlackAuthenticationMechanism","SourceSlackOptionTitle","SourceSlackSchemasOptionTitle","SourceSlackSlack","SourceSmaily","SourceSmartengage","SourceSmartsheets","SourceSmartsheetsAuthType","SourceSmartsheetsAuthorizationMethod","SourceSmartsheetsOAuth20","SourceSmartsheetsSchemasAuthType","SourceSmartsheetsSmartsheets","SourceSnapchatMarketing","SourceSnapchatMarketingSnapchatMarketing","SourceSnowflake","SourceSnowflakeAuthType","SourceSnowflakeAuthorizationMethod","SourceSnowflakeKeyPairAuthentication","SourceSnowflakeOAuth20","SourceSnowflakeSchemasAuthType","SourceSnowflakeSchemasCredentialsAuthType","SourceSnowflakeSnowflake","SourceSnowflakeUsernameAndPassword","SourceSonarCloud","SourceSpacexAPI","SourceSquare","SourceSquareAPIKey","SourceSquareAuthType","SourceSquareAuthentication","SourceSquareSchemasAuthType","SourceSquareSquare","SourceStrava","SourceStravaAuthType","SourceStripe","SourceSurveySparrow","SourceSurveySparrowURLBase","SourceSurveymonkey","SourceSurveymonkeyAuthMethod","SourceSurveymonkeySurveymonkey","SourceTempo","SourceTheGuardianAPI","SourceTiktokMarketing","SourceTiktokMarketingAuthType","SourceTiktokMarketingAuthenticationMethod","SourceTiktokMarketingOAuth20","SourceTiktokMarketingSchemasAuthType","SourceTiktokMarketingTiktokMarketing","SourceTrello","SourceTrustpilot","SourceTrustpilotAPIKey","SourceTrustpilotAuthType","SourceTrustpilotAuthorizationMethod","SourceTrustpilotOAuth20","SourceTrustpilotSchemasAuthType","SourceTvmazeSchedule","SourceTwilio","SourceTwilioTaskrouter","SourceTwitter","SourceTypeform","SourceTypeformAuthType","SourceTypeformAuthorizationMethod","SourceTypeformOAuth20","SourceTypeformPrivateToken","SourceTypeformSchemasAuthType","SourceTypeformTypeform","SourceUsCensus","SourceVantage","SourceWebflow","SourceWhiskyHunter","SourceWikipediaPageviews","SourceWoocommerce","SourceXkcd","SourceYandexMetrica","SourceYotpo","SourceYoutubeAnalytics","SourceYoutubeAnalyticsYoutubeAnalytics","SourceZendeskChat","SourceZendeskChatAccessToken","SourceZendeskChatAuthorizationMethod","SourceZendeskChatCredentials","SourceZendeskChatOAuth20","SourceZendeskChatSchemasCredentials","SourceZendeskChatZendeskChat","SourceZendeskSell","SourceZendeskSunshine","SourceZendeskSunshineAPIToken","SourceZendeskSunshineAuthMethod","SourceZendeskSunshineAuthorizationMethod","SourceZendeskSunshineOAuth20","SourceZendeskSunshineSchemasAuthMethod","SourceZendeskSunshineZendeskSunshine","SourceZendeskSupport","SourceZendeskSupportAPIToken","SourceZendeskSupportAuthentication","SourceZendeskSupportCredentials","SourceZendeskSupportOAuth20","SourceZendeskSupportSchemasCredentials","SourceZendeskSupportZendeskSupport","SourceZendeskTalk","SourceZendeskTalkAPIToken","SourceZendeskTalkAuthType","SourceZendeskTalkAuthentication","SourceZendeskTalkOAuth20","SourceZendeskTalkSchemasAuthType","SourceZendeskTalkZendeskTalk","SourceZenloop","SourceZohoCrm","SourceZohoCrmEnvironment","SourceZoom","SourcesResponse","SpacexAPI","Square","SquareCredentials","StandaloneMongoDbInstance","State","StateFilter","Status","Storage","StorageProvider","Strategies","Strava","StreamConfiguration","StreamConfigurations","StreamProperties","StreamsCriteria","StringFilter","Stripe","SurveyMonkeyAuthorizationMethod","SurveySparrow","Su
rveymonkey","SurveymonkeyCredentials","SwipeUpAttributionWindow","SystemIDSID","TLSEncryptedVerifyCertificate","Tempo","Teradata","TestDestination","TestDestinationType","TextSplitter","TheGuardianAPI","TiktokMarketing","TiktokMarketingCredentials","TimeGranularity","TimeGranularityType","Timeplus","ToValue","TopHeadlinesTopic","TransformationQueryRunType","Trello","Trustpilot","TunnelMethod","TvmazeSchedule","Twilio","TwilioTaskrouter","Twitter","Type","Typeform","TypeformCredentials","Typesense","URLBase","Unencrypted","UnexpectedFieldBehavior","UnstructuredDocumentFormat","UpdateMethod","UploadingMethod","UsCensus","UserProvided","UserResponse","UsernameAndPassword","UsernamePassword","UsersResponse","ValidActionBreakdowns","ValidAdSetStatuses","ValidAdStatuses","ValidBreakdowns","ValidCampaignStatuses","ValidationPolicy","Validenums","Value","ValueType","Vantage","Vectara","VerifyCa","VerifyFull","VerifyIdentity","ViaAPI","ViewAttributionWindow","ViewWindowDays","Weaviate","Webflow","WhiskyHunter","WikipediaPageviews","Woocommerce","WorkspaceCreateRequest","WorkspaceOAuthCredentialsRequest","WorkspaceResponse","WorkspaceUpdateRequest","WorkspacesResponse","Xkcd","Xz","YandexMetrica","Yellowbrick","Yotpo","YoutubeAnalytics","YoutubeAnalyticsCredentials","ZendeskChat","ZendeskChatCredentials","ZendeskSell","ZendeskSunshine","ZendeskSunshineCredentials","ZendeskSupport","ZendeskSupportCredentials","ZendeskTalk","ZendeskTalkCredentials","Zenloop","ZohoCRMEdition","ZohoCrm","Zoom","Zstandard"] +__all__ = ["APIAccessToken","APIEndpoint","APIKey","APIKeyAuth","APIKeySecret","APIParameterConfigModel","APIPassword","APIServer","APIToken","AWSEnvironment","AWSRegion","AWSS3Staging","AWSSellerPartnerAccountType","AccessToken","AccessTokenIsRequiredForAuthenticationRequests","AccountNames","ActionReportTime","ActorTypeEnum","AdAnalyticsReportConfiguration","Aha","Airbyte","AirbyteAPIConnectionSchedule","Aircall","Airtable","Allow","AmazonAds","AmazonSellerPartner","AmazonSqs","Amplitude","AndGroup","ApifyDataset","Appcues","Appfigures","Appfollow","Applications","Asana","AsanaCredentials","Astra","Auth0","AuthMethod","AuthType","AuthenticateViaAPIKey","AuthenticateViaAccessKeys","AuthenticateViaAsanaOauth","AuthenticateViaFacebookMarketingOauth","AuthenticateViaGoogleOauth","AuthenticateViaHarvestOAuth","AuthenticateViaLeverAPIKey","AuthenticateViaLeverOAuth","AuthenticateViaMicrosoft","AuthenticateViaMicrosoftOAuth","AuthenticateViaMicrosoftOAuth20","AuthenticateViaOAuth","AuthenticateViaOAuth20","AuthenticateViaOauth2","AuthenticateViaPassword","AuthenticateViaPrivateKey","AuthenticateViaRetentlyOAuth","AuthenticateViaStorageAccountKey","AuthenticateWithAPIToken","AuthenticateWithPersonalAccessToken","Authentication","AuthenticationMechanism","AuthenticationMethod","AuthenticationMode","AuthenticationType","AuthenticationViaGoogleOAuth","AuthenticationWildcard","Authorization","AuthorizationMethod","AuthorizationType","Autogenerated","AvroApacheAvro","AvroFormat","AwsCloudtrail","AwsDatalake","AzBlobAzureBlobStorage","AzureBlobStorage","AzureBlobStorageCredentials","AzureOpenAI","AzureTable","BambooHr","BaseURL","BaseURLPrefix","Basecamp","Basic","BatchedStandardInserts","BetweenFilter","Bigquery","BingAds","Bitly","BothUsernameAndPasswordIsRequiredForAuthenticationRequest","Braintree","Braze","BreezyHr","Brevo","Buildkite","Buzzsprout","ByMarkdownHeader","ByProgrammingLanguage","BySeparator","Bzip2","CSVCommaSeparatedValues","CSVFormat","CSVHeaderDefinition","CacheType","Calendly","Canny","Captu
reModeAdvanced","Cart","Categories","CentralAPIRouter","Chameleon","Chargebee","Chartmogul","ChooseHowToPartitionData","Cimis","Clazar","ClickWindowDays","Clickhouse","ClickupAPI","Clockify","CloseCom","ClusterType","Coda","Codec","Cohere","CohortReportSettings","CohortReports","Cohorts","CohortsRange","CoinAPI","Coinmarketcap","Collection","Compression","CompressionCodec","CompressionCodecOptional","CompressionType","Configcat","Confluence","ConnectBy","ConnectionCreateRequest","ConnectionPatchRequest","ConnectionResponse","ConnectionScheduleResponse","ConnectionStatusEnum","ConnectionSyncModeEnum","ConnectionType","ConnectionsResponse","ContentType","ConversionReportTime","Convex","Country","Credential","CredentialType","Credentials","CredentialsTitle","CustomQueriesArray","CustomReportConfig","CustomerIo","CustomerStatus","DataCenterLocation","DataFreshness","DataRegion","DataSource","DataType","Databricks","Datadog","Datascope","DatasetLocation","DateRange","Dbt","DefaultVectorizer","Deflate","DeletionMode","Delighted","DestinationAstra","DestinationAstraLanguage","DestinationAstraMode","DestinationAstraSchemasEmbeddingEmbedding1Mode","DestinationAstraSchemasEmbeddingEmbeddingMode","DestinationAstraSchemasEmbeddingMode","DestinationAstraSchemasMode","DestinationAstraSchemasProcessingMode","DestinationAstraSchemasProcessingTextSplitterMode","DestinationAstraSchemasProcessingTextSplitterTextSplitterMode","DestinationAwsDatalake","DestinationAwsDatalakeCompressionCodecOptional","DestinationAwsDatalakeCredentialsTitle","DestinationAwsDatalakeFormatTypeWildcard","DestinationAzureBlobStorage","DestinationAzureBlobStorageAzureBlobStorage","DestinationAzureBlobStorageFormatType","DestinationAzureBlobStorageJSONLinesNewlineDelimitedJSON","DestinationBigquery","DestinationBigqueryCredentialType","DestinationBigqueryHMACKey","DestinationBigqueryMethod","DestinationClickhouse","DestinationClickhouseSchemasTunnelMethod","DestinationClickhouseTunnelMethod","DestinationConfiguration","DestinationConvex","DestinationCreateRequest","DestinationDatabricks","DestinationDatabricksAuthType","DestinationDatabricksSchemasAuthType","DestinationDuckdb","DestinationDynamodb","DestinationElasticsearch","DestinationElasticsearchMethod","DestinationElasticsearchNoTunnel","DestinationElasticsearchPasswordAuthentication","DestinationElasticsearchSSHKeyAuthentication","DestinationElasticsearchSSHTunnelMethod","DestinationElasticsearchSchemasAuthenticationMethodMethod","DestinationElasticsearchSchemasMethod","DestinationElasticsearchSchemasTunnelMethod","DestinationElasticsearchSchemasTunnelMethodTunnelMethod","DestinationElasticsearchTunnelMethod","DestinationFirebolt","DestinationFireboltLoadingMethod","DestinationFireboltMethod","DestinationFireboltSchemasMethod","DestinationFirestore","DestinationGcs","DestinationGcsAuthentication","DestinationGcsCSVCommaSeparatedValues","DestinationGcsCodec","DestinationGcsCompression","DestinationGcsCompressionCodec","DestinationGcsCompressionType","DestinationGcsFormatType","DestinationGcsGZIP","DestinationGcsJSONLinesNewlineDelimitedJSON","DestinationGcsNoCompression","DestinationGcsOutputFormat","DestinationGcsParquetColumnarStorage","DestinationGcsSchemasCodec","DestinationGcsSchemasCompressionType","DestinationGcsSchemasFormatCodec","DestinationGcsSchemasFormatCompressionType","DestinationGcsSchemasFormatFormatType","DestinationGcsSchemasFormatOutputFormat1Codec","DestinationGcsSchemasFormatOutputFormatCodec","DestinationGcsSchemasFormatOutputFormatFormatType","DestinationGc
sSchemasFormatType","DestinationGcsSchemasNoCompression","DestinationGoogleSheets","DestinationGoogleSheetsGoogleSheets","DestinationMilvus","DestinationMilvusAPIToken","DestinationMilvusAuthentication","DestinationMilvusAzureOpenAI","DestinationMilvusByMarkdownHeader","DestinationMilvusByProgrammingLanguage","DestinationMilvusBySeparator","DestinationMilvusCohere","DestinationMilvusEmbedding","DestinationMilvusFake","DestinationMilvusFieldNameMappingConfigModel","DestinationMilvusIndexing","DestinationMilvusLanguage","DestinationMilvusMode","DestinationMilvusOpenAI","DestinationMilvusOpenAICompatible","DestinationMilvusProcessingConfigModel","DestinationMilvusSchemasEmbeddingEmbedding5Mode","DestinationMilvusSchemasEmbeddingEmbeddingMode","DestinationMilvusSchemasEmbeddingMode","DestinationMilvusSchemasIndexingAuthAuthenticationMode","DestinationMilvusSchemasIndexingAuthMode","DestinationMilvusSchemasIndexingMode","DestinationMilvusSchemasMode","DestinationMilvusSchemasProcessingMode","DestinationMilvusSchemasProcessingTextSplitterMode","DestinationMilvusSchemasProcessingTextSplitterTextSplitterMode","DestinationMilvusTextSplitter","DestinationMilvusUsernamePassword","DestinationMongodb","DestinationMongodbAuthorization","DestinationMongodbInstance","DestinationMongodbNoTunnel","DestinationMongodbNone","DestinationMongodbPasswordAuthentication","DestinationMongodbSSHKeyAuthentication","DestinationMongodbSSHTunnelMethod","DestinationMongodbSchemasAuthorization","DestinationMongodbSchemasInstance","DestinationMongodbSchemasTunnelMethod","DestinationMongodbSchemasTunnelMethodTunnelMethod","DestinationMongodbTunnelMethod","DestinationMssql","DestinationMssqlNoTunnel","DestinationMssqlPasswordAuthentication","DestinationMssqlSSHKeyAuthentication","DestinationMssqlSSHTunnelMethod","DestinationMssqlSchemasSslMethod","DestinationMssqlSchemasSslMethodSslMethod","DestinationMssqlSchemasTunnelMethod","DestinationMssqlSchemasTunnelMethodTunnelMethod","DestinationMssqlSslMethod","DestinationMssqlTunnelMethod","DestinationMysql","DestinationMysqlNoTunnel","DestinationMysqlPasswordAuthentication","DestinationMysqlSSHKeyAuthentication","DestinationMysqlSSHTunnelMethod","DestinationMysqlSchemasTunnelMethod","DestinationMysqlSchemasTunnelMethodTunnelMethod","DestinationMysqlTunnelMethod","DestinationOracle","DestinationOracleEncryptionMethod","DestinationOracleNoTunnel","DestinationOraclePasswordAuthentication","DestinationOracleSSHKeyAuthentication","DestinationOracleSSHTunnelMethod","DestinationOracleSchemasEncryptionMethod","DestinationOracleSchemasTunnelMethod","DestinationOracleSchemasTunnelMethodTunnelMethod","DestinationOracleTunnelMethod","DestinationOracleUnencrypted","DestinationPatchRequest","DestinationPgvector","DestinationPgvectorAzureOpenAI","DestinationPgvectorByMarkdownHeader","DestinationPgvectorByProgrammingLanguage","DestinationPgvectorBySeparator","DestinationPgvectorCohere","DestinationPgvectorCredentials","DestinationPgvectorEmbedding","DestinationPgvectorFake","DestinationPgvectorFieldNameMappingConfigModel","DestinationPgvectorLanguage","DestinationPgvectorMode","DestinationPgvectorOpenAI","DestinationPgvectorOpenAICompatible","DestinationPgvectorProcessingConfigModel","DestinationPgvectorSchemasEmbeddingEmbedding5Mode","DestinationPgvectorSchemasEmbeddingEmbeddingMode","DestinationPgvectorSchemasEmbeddingMode","DestinationPgvectorSchemasMode","DestinationPgvectorSchemasProcessingMode","DestinationPgvectorSchemasProcessingTextSplitterMode","DestinationPgvectorSchemasProcessingTextSp
litterTextSplitterMode","DestinationPgvectorTextSplitter","DestinationPinecone","DestinationPineconeAzureOpenAI","DestinationPineconeByMarkdownHeader","DestinationPineconeByProgrammingLanguage","DestinationPineconeBySeparator","DestinationPineconeCohere","DestinationPineconeEmbedding","DestinationPineconeFake","DestinationPineconeFieldNameMappingConfigModel","DestinationPineconeIndexing","DestinationPineconeLanguage","DestinationPineconeMode","DestinationPineconeOpenAI","DestinationPineconeOpenAICompatible","DestinationPineconeProcessingConfigModel","DestinationPineconeSchemasEmbeddingEmbedding5Mode","DestinationPineconeSchemasEmbeddingEmbeddingMode","DestinationPineconeSchemasEmbeddingMode","DestinationPineconeSchemasMode","DestinationPineconeSchemasProcessingMode","DestinationPineconeSchemasProcessingTextSplitterMode","DestinationPineconeSchemasProcessingTextSplitterTextSplitterMode","DestinationPineconeTextSplitter","DestinationPostgres","DestinationPostgresMode","DestinationPostgresNoTunnel","DestinationPostgresPasswordAuthentication","DestinationPostgresSSHKeyAuthentication","DestinationPostgresSSHTunnelMethod","DestinationPostgresSchemasMode","DestinationPostgresSchemasSSLModeSSLModes6Mode","DestinationPostgresSchemasSSLModeSSLModesMode","DestinationPostgresSchemasSslModeMode","DestinationPostgresSchemasTunnelMethod","DestinationPostgresSchemasTunnelMethodTunnelMethod","DestinationPostgresTunnelMethod","DestinationPubsub","DestinationPutRequest","DestinationQdrant","DestinationQdrantAuthenticationMethod","DestinationQdrantAzureOpenAI","DestinationQdrantByMarkdownHeader","DestinationQdrantByProgrammingLanguage","DestinationQdrantBySeparator","DestinationQdrantCohere","DestinationQdrantEmbedding","DestinationQdrantFake","DestinationQdrantFieldNameMappingConfigModel","DestinationQdrantIndexing","DestinationQdrantLanguage","DestinationQdrantMode","DestinationQdrantNoAuth","DestinationQdrantOpenAI","DestinationQdrantOpenAICompatible","DestinationQdrantProcessingConfigModel","DestinationQdrantSchemasEmbeddingEmbedding5Mode","DestinationQdrantSchemasEmbeddingEmbeddingMode","DestinationQdrantSchemasEmbeddingMode","DestinationQdrantSchemasIndexingAuthMethodMode","DestinationQdrantSchemasIndexingMode","DestinationQdrantSchemasMode","DestinationQdrantSchemasProcessingMode","DestinationQdrantSchemasProcessingTextSplitterMode","DestinationQdrantSchemasProcessingTextSplitterTextSplitterMode","DestinationQdrantTextSplitter","DestinationRedis","DestinationRedisDisable","DestinationRedisMode","DestinationRedisNoTunnel","DestinationRedisPasswordAuthentication","DestinationRedisSSHKeyAuthentication","DestinationRedisSSHTunnelMethod","DestinationRedisSSLModes","DestinationRedisSchemasMode","DestinationRedisSchemasTunnelMethod","DestinationRedisSchemasTunnelMethodTunnelMethod","DestinationRedisTunnelMethod","DestinationRedisVerifyFull","DestinationRedshift","DestinationRedshiftMethod","DestinationRedshiftNoTunnel","DestinationRedshiftPasswordAuthentication","DestinationRedshiftS3BucketRegion","DestinationRedshiftSSHKeyAuthentication","DestinationRedshiftSSHTunnelMethod","DestinationRedshiftSchemasTunnelMethod","DestinationRedshiftSchemasTunnelMethodTunnelMethod","DestinationRedshiftTunnelMethod","DestinationResponse","DestinationS3","DestinationS3AvroApacheAvro","DestinationS3Bzip2","DestinationS3CSVCommaSeparatedValues","DestinationS3Codec","DestinationS3Compression","DestinationS3CompressionCodec","DestinationS3CompressionType","DestinationS3Deflate","DestinationS3Flattening","DestinationS3FormatType","
DestinationS3GZIP","DestinationS3Glue","DestinationS3GlueCompression","DestinationS3GlueCompressionType","DestinationS3GlueFormatType","DestinationS3GlueGZIP","DestinationS3GlueJSONLinesNewlineDelimitedJSON","DestinationS3GlueNoCompression","DestinationS3GlueOutputFormat","DestinationS3GlueS3BucketRegion","DestinationS3GlueSchemasCompressionType","DestinationS3JSONLinesNewlineDelimitedJSON","DestinationS3NoCompression","DestinationS3OutputFormat","DestinationS3ParquetColumnarStorage","DestinationS3S3BucketRegion","DestinationS3SchemasCodec","DestinationS3SchemasCompression","DestinationS3SchemasCompressionCodec","DestinationS3SchemasCompressionType","DestinationS3SchemasFlattening","DestinationS3SchemasFormatCodec","DestinationS3SchemasFormatCompressionType","DestinationS3SchemasFormatFormatType","DestinationS3SchemasFormatNoCompression","DestinationS3SchemasFormatOutputFormat3Codec","DestinationS3SchemasFormatOutputFormat3CompressionCodecCodec","DestinationS3SchemasFormatOutputFormatCodec","DestinationS3SchemasFormatOutputFormatCompressionType","DestinationS3SchemasFormatOutputFormatFormatType","DestinationS3SchemasFormatType","DestinationS3SchemasGZIP","DestinationS3SchemasNoCompression","DestinationS3Snappy","DestinationS3Xz","DestinationS3Zstandard","DestinationSftpJSON","DestinationSnowflake","DestinationSnowflakeAuthType","DestinationSnowflakeCortex","DestinationSnowflakeCortexAzureOpenAI","DestinationSnowflakeCortexByMarkdownHeader","DestinationSnowflakeCortexByProgrammingLanguage","DestinationSnowflakeCortexBySeparator","DestinationSnowflakeCortexCohere","DestinationSnowflakeCortexCredentials","DestinationSnowflakeCortexEmbedding","DestinationSnowflakeCortexFake","DestinationSnowflakeCortexFieldNameMappingConfigModel","DestinationSnowflakeCortexLanguage","DestinationSnowflakeCortexMode","DestinationSnowflakeCortexOpenAI","DestinationSnowflakeCortexOpenAICompatible","DestinationSnowflakeCortexProcessingConfigModel","DestinationSnowflakeCortexSchemasEmbeddingEmbedding5Mode","DestinationSnowflakeCortexSchemasEmbeddingEmbeddingMode","DestinationSnowflakeCortexSchemasEmbeddingMode","DestinationSnowflakeCortexSchemasMode","DestinationSnowflakeCortexSchemasProcessingMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterMode","DestinationSnowflakeCortexSchemasProcessingTextSplitterTextSplitterMode","DestinationSnowflakeCortexTextSplitter","DestinationSnowflakeOAuth20","DestinationSnowflakeSchemasAuthType","DestinationSnowflakeSchemasCredentialsAuthType","DestinationSnowflakeSnowflake","DestinationTeradata","DestinationTeradataAllow","DestinationTeradataDisable","DestinationTeradataMode","DestinationTeradataPrefer","DestinationTeradataRequire","DestinationTeradataSSLModes","DestinationTeradataSchemasMode","DestinationTeradataSchemasSSLModeSSLModes5Mode","DestinationTeradataSchemasSSLModeSSLModes6Mode","DestinationTeradataSchemasSSLModeSSLModesMode","DestinationTeradataSchemasSslModeMode","DestinationTeradataVerifyCa","DestinationTeradataVerifyFull","DestinationTimeplus","DestinationTypesense","DestinationVectara","DestinationWeaviate","DestinationWeaviateAPIToken","DestinationWeaviateAuthentication","DestinationWeaviateAzureOpenAI","DestinationWeaviateByMarkdownHeader","DestinationWeaviateByProgrammingLanguage","DestinationWeaviateBySeparator","DestinationWeaviateCohere","DestinationWeaviateEmbedding","DestinationWeaviateFake","DestinationWeaviateFieldNameMappingConfigModel","DestinationWeaviateIndexing","DestinationWeaviateLanguage","DestinationWeaviateMode","DestinationWeaviateOpen
AI","DestinationWeaviateOpenAICompatible","DestinationWeaviateProcessingConfigModel","DestinationWeaviateSchemasEmbeddingEmbedding5Mode","DestinationWeaviateSchemasEmbeddingEmbedding6Mode","DestinationWeaviateSchemasEmbeddingEmbedding7Mode","DestinationWeaviateSchemasEmbeddingEmbeddingMode","DestinationWeaviateSchemasEmbeddingMode","DestinationWeaviateSchemasIndexingAuthAuthenticationMode","DestinationWeaviateSchemasIndexingAuthMode","DestinationWeaviateSchemasIndexingMode","DestinationWeaviateSchemasMode","DestinationWeaviateSchemasProcessingMode","DestinationWeaviateSchemasProcessingTextSplitterMode","DestinationWeaviateSchemasProcessingTextSplitterTextSplitterMode","DestinationWeaviateTextSplitter","DestinationWeaviateUsernamePassword","DestinationYellowbrick","DestinationYellowbrickAllow","DestinationYellowbrickDisable","DestinationYellowbrickMode","DestinationYellowbrickNoTunnel","DestinationYellowbrickPasswordAuthentication","DestinationYellowbrickPrefer","DestinationYellowbrickRequire","DestinationYellowbrickSSHKeyAuthentication","DestinationYellowbrickSSHTunnelMethod","DestinationYellowbrickSSLModes","DestinationYellowbrickSchemasMode","DestinationYellowbrickSchemasSSLModeSSLModes5Mode","DestinationYellowbrickSchemasSSLModeSSLModes6Mode","DestinationYellowbrickSchemasSSLModeSSLModesMode","DestinationYellowbrickSchemasSslModeMode","DestinationYellowbrickSchemasTunnelMethod","DestinationYellowbrickSchemasTunnelMethodTunnelMethod","DestinationYellowbrickTunnelMethod","DestinationYellowbrickVerifyCa","DestinationYellowbrickVerifyFull","DestinationsResponse","DetailType","DetectChangesWithXminSystemColumn","Dimension","DimensionsFilter","Disable","Disabled","DistanceMetric","Dixa","Dockerhub","DocumentFileTypeFormatExperimental","DomainRegionCode","DoubleValue","Dremio","Duckdb","DynamoDBRegion","Dynamodb","EUBasedAccount","Elasticsearch","Emailoctopus","Embedding","Enabled","EncryptedTrustServerCertificate","EncryptedVerifyCertificate","Encryption","EncryptionAlgorithm","EncryptionMethod","EngagementWindowDays","Enterprise","Environment","ExcelFormat","ExchangeRates","Expression","ExternalTableViaS3","Ezofficeinventory","FacebookMarketing","FacebookMarketingCredentials","Fake","Faker","Fauna","FieldNameMappingConfigModel","File","FileBasedStreamConfig","FileFormat","Filetype","Filter","FilterAppliedWhileFetchingRecordsBasedOnAttributeKeyAndAttributeValueWhichWillBeAppendedOnTheRequestBody","FilterName","FilterType","Firebolt","Firestore","Flattening","Fleetio","Format","FormatType","FormatTypeWildcard","Freshcaller","Freshdesk","Freshsales","FromCSV","FromField","FromValue","Front","GCSBucketRegion","GCSGoogleCloudStorage","GCSStaging","GCSTmpFilesAfterwardProcessing","GainsightPx","Gcs","GeographyEnum","GeographyEnumNoDefault","Getlago","Github","GithubCredentials","Gitlab","GitlabCredentials","Glassfrog","GlobalAccount","Gnews","Goldcast","GoogleAds","GoogleAdsCredentials","GoogleAnalyticsDataAPI","GoogleAnalyticsDataAPICredentials","GoogleCredentials","GoogleDirectory","GoogleDrive","GoogleDriveCredentials","GooglePagespeedInsights","GoogleSearchConsole","GoogleSheets","GoogleSheetsCredentials","GoogleTasks","GoogleWebfonts","Granularity","GranularityForGeoLocationRegion","GranularityForPeriodicReports","Greenhouse","Gridly","GroupBy","Guru","Gzip","HMACKey","HTTPSPublicWeb","HardcodedRecords","Harvest","Header","HeaderDefinitionType","Height","Hibob","HighLevel","Hubplanner","Hubspot","HubspotCredentials","IAMRole","IAMUser","In","InListFilter","Indexing","InitiateOauthRequest","Ins
ightConfig","Insightly","Instagram","Instance","Instatus","Int64Value","Intercom","InvalidCDCPositionBehaviorAdvanced","Ip2whois","Iterable","JSONLinesNewlineDelimitedJSON","Jira","JobCreateRequest","JobResponse","JobStatusEnum","JobTypeEnum","JobsResponse","Jotform","JsonlFormat","K6Cloud","KeyPairAuthentication","Klarna","Klaviyo","Kyve","LSNCommitBehaviour","Language","Launchdarkly","Leadfeeder","Lemlist","Level","LeverHiring","LeverHiringCredentials","LinkedinAds","LinkedinAdsCredentials","LinkedinPages","Linnworks","LoadingMethod","Local","LocalFilesystemLimited","LoginPassword","Lokalise","Looker","Luma","Mailchimp","MailchimpCredentials","Mailgun","MailjetSms","Marketo","Metabase","Method","MetricsFilter","MicrosoftOnedrive","MicrosoftOnedriveCredentials","MicrosoftSharepoint","MicrosoftSharepointCredentials","MicrosoftTeams","MicrosoftTeamsCredentials","Milvus","Mixpanel","Mode","Monday","MondayCredentials","MongoDBAtlas","MongoDBAtlasReplicaSet","MongoDbInstanceType","Mongodb","MongodbV2","Mssql","MyHours","Mysql","NamespaceDefinitionEnum","NamespaceDefinitionEnumNoDefault","NativeNetworkEncryptionNNE","Netsuite","NoAuth","NoAuthentication","NoCompression","NoExternalEmbedding","NoTunnel","NonBreakingSchemaUpdatesBehaviorEnum","NonBreakingSchemaUpdatesBehaviorEnumNoDefault","NoneT","Normalization","NormalizationFlattening","NorthpassLms","NotExpression","Notion","NotionCredentials","Nullable","NumericFilter","Nylas","Nytimes","OAuth","OAuth20","OAuth20Credentials","OAuth20WithPrivateKey","OAuth2AccessToken","OAuth2ConfidentialApplication","OAuth2Recommended","OAuthActorNames","OAuthCredentialsConfiguration","OauthAuthentication","Okta","Omnisend","Onesignal","OpenAI","OpenAICompatible","Operator","OptionTitle","OptionsList","OrGroup","Oracle","Orb","Orbit","OrganizationResponse","OrganizationsResponse","OriginDatacenterOfTheSurveyMonkeyAccount","OutbrainAmplify","OutputFormat","OutputFormatWildcard","Outreach","ParquetColumnarStorage","ParquetFormat","ParsingStrategy","PasswordAuthentication","PaypalTransaction","Paystack","Pendo","Pennylane","PeriodUsedForMostPopularStreams","PermissionCreateRequest","PermissionResponse","PermissionResponseRead","PermissionScope","PermissionType","PermissionUpdateRequest","PermissionsResponse","Persistiq","PersonalAccessToken","PexelsAPI","Pgvector","Picqer","Pinecone","Pinterest","PinterestCredentials","Pipedrive","PivotCategory","Piwik","Planhat","Plugin","Pocket","Pokeapi","PokemonName","PolygonStockAPI","Postgres","PostgresConnection","Posthog","Postmarkapp","Prefer","Preferred","Prestashop","PrivateApp","PrivateToken","Processing","ProcessingConfigModel","ProductCatalog","Productboard","Productive","ProjectSecret","PublicPermissionType","Pubsub","Pypi","Qdrant","Qualaroo","Queries","Railz","ReadChangesUsingBinaryLogCDC","ReadChangesUsingChangeDataCaptureCDC","ReadChangesUsingWriteAheadLogCDC","Recharge","Recreation","Recruitee","Recurly","Reddit","Redis","Redshift","Region","ReplicaSet","ReportConfig","ReportName","ReportOptions","ReportRecordTypes","ReportingDataObject","Require","Required","Retently","RetentlyCredentials","RkiCovid","RoleBasedAuthentication","Rss","S3","S3AmazonWebServices","S3BucketRegion","S3Glue","SCPSecureCopyProtocol","SFTPSecureFileTransferProtocol","SQLInserts","SSHKeyAuthentication","SSHSecureShell","SSHTunnelMethod","SSLMethod","SSLModes","Salesforce","Salesloft","SandboxAccessToken","SapFieldglass","Savvycal","ScanChangesWithUserDefinedCursor","ScheduleTypeEnum","ScheduleTypeWithBasicEnum","SchemeBasicAuth","Schem
eClientCredentials","Scryfall","SearchCriteria","SearchScope","Secoda","Security","SelectedFieldInfo","SelfManagedReplicaSet","Sendgrid","Sendinblue","Senseforce","Sentry","SerializationLibrary","ServiceAccount","ServiceAccountKey","ServiceAccountKeyAuthentication","ServiceKeyAuthentication","ServiceName","Sevenshifts","Sftp","SftpBulk","SftpJSON","ShareTypeUsedForMostPopularSharedStream","Shopify","ShopifyAuthorizationMethod","ShopifyCredentials","Shortcut","Shortio","SignInViaGoogleOAuth","SignInViaSlackOAuth","SingleStoreAccessToken","Site","Slack","SlackCredentials","Smaily","Smartengage","Smartsheets","SmartsheetsCredentials","SnapchatMarketing","Snappy","Snowflake","SnowflakeConnection","SnowflakeCortex","SnowflakeCredentials","SonarCloud","SortBy","Source7shifts","SourceAha","SourceAirbyte","SourceAircall","SourceAirtable","SourceAirtableAirtable","SourceAirtableAuthMethod","SourceAirtableAuthentication","SourceAirtableOAuth20","SourceAirtablePersonalAccessToken","SourceAirtableSchemasAuthMethod","SourceAmazonAds","SourceAmazonAdsAmazonAds","SourceAmazonAdsAuthType","SourceAmazonSellerPartner","SourceAmazonSellerPartnerAmazonSellerPartner","SourceAmazonSellerPartnerAuthType","SourceAmazonSqs","SourceAmazonSqsAWSRegion","SourceAmplitude","SourceApifyDataset","SourceAppcues","SourceAppfigures","SourceAppfollow","SourceAsana","SourceAsanaAsana","SourceAsanaCredentialsTitle","SourceAsanaSchemasCredentialsTitle","SourceAuth0","SourceAuth0AuthenticationMethod","SourceAuth0SchemasAuthenticationMethod","SourceAuth0SchemasCredentialsAuthenticationMethod","SourceAwsCloudtrail","SourceAzureBlobStorage","SourceAzureBlobStorageAuthType","SourceAzureBlobStorageAuthentication","SourceAzureBlobStorageAzureBlobStorage","SourceAzureBlobStorageFiletype","SourceAzureBlobStorageHeaderDefinitionType","SourceAzureBlobStorageMode","SourceAzureBlobStorageSchemasAuthType","SourceAzureBlobStorageSchemasFiletype","SourceAzureBlobStorageSchemasHeaderDefinitionType","SourceAzureBlobStorageSchemasStreamsFiletype","SourceAzureBlobStorageSchemasStreamsFormatFiletype","SourceAzureTable","SourceBambooHr","SourceBasecamp","SourceBigquery","SourceBigqueryBigquery","SourceBingAds","SourceBingAdsBingAds","SourceBitly","SourceBraintree","SourceBraintreeEnvironment","SourceBraze","SourceBreezyHr","SourceBrevo","SourceBuildkite","SourceBuzzsprout","SourceCalendly","SourceCanny","SourceCart","SourceCartAuthType","SourceCartAuthorizationMethod","SourceCartSchemasAuthType","SourceChameleon","SourceChargebee","SourceChartmogul","SourceCimis","SourceClazar","SourceClickhouse","SourceClickhouseClickhouse","SourceClickhouseNoTunnel","SourceClickhousePasswordAuthentication","SourceClickhouseSSHKeyAuthentication","SourceClickhouseSSHTunnelMethod","SourceClickhouseSchemasTunnelMethod","SourceClickhouseSchemasTunnelMethodTunnelMethod","SourceClickhouseTunnelMethod","SourceClickupAPI","SourceClockify","SourceCloseCom","SourceCoda","SourceCoinAPI","SourceCoinmarketcap","SourceConfigcat","SourceConfiguration","SourceConfluence","SourceConvex","SourceConvexConvex","SourceCreateRequest","SourceCustomerIo","SourceDatadog","SourceDatascope","SourceDbt","SourceDelighted","SourceDixa","SourceDockerhub","SourceDremio","SourceDynamodb","SourceDynamodbAuthType","SourceDynamodbCredentials","SourceDynamodbDynamodb","SourceDynamodbDynamodbRegion","SourceDynamodbSchemasAuthType","SourceEmailoctopus","SourceExchangeRates","SourceEzofficeinventory","SourceFacebookMarketing","SourceFacebookMarketingActionReportTime","SourceFacebookMarketingAuthType","Sou
rceFacebookMarketingAuthentication","SourceFacebookMarketingFacebookMarketing","SourceFacebookMarketingSchemasAuthType","SourceFacebookMarketingValidEnums","SourceFaker","SourceFauna","SourceFaunaDeletionMode","SourceFaunaSchemasDeletionMode","SourceFile","SourceFileSchemasProviderStorage","SourceFileSchemasProviderStorageProvider6Storage","SourceFileSchemasProviderStorageProvider7Storage","SourceFileSchemasProviderStorageProvider8Storage","SourceFileSchemasProviderStorageProviderStorage","SourceFileSchemasStorage","SourceFileStorage","SourceFirebolt","SourceFireboltFirebolt","SourceFleetio","SourceFreshcaller","SourceFreshdesk","SourceFreshsales","SourceFront","SourceGainsightPx","SourceGcs","SourceGcsAutogenerated","SourceGcsAvroFormat","SourceGcsCSVFormat","SourceGcsCSVHeaderDefinition","SourceGcsFileBasedStreamConfig","SourceGcsFiletype","SourceGcsFormat","SourceGcsFromCSV","SourceGcsGcs","SourceGcsHeaderDefinitionType","SourceGcsJsonlFormat","SourceGcsLocal","SourceGcsMode","SourceGcsParquetFormat","SourceGcsParsingStrategy","SourceGcsProcessing","SourceGcsSchemasFiletype","SourceGcsSchemasHeaderDefinitionType","SourceGcsSchemasMode","SourceGcsSchemasStreamsFiletype","SourceGcsSchemasStreamsFormatFiletype","SourceGcsSchemasStreamsFormatFormat6Filetype","SourceGcsSchemasStreamsFormatFormatFiletype","SourceGcsSchemasStreamsHeaderDefinitionType","SourceGcsUserProvided","SourceGcsValidationPolicy","SourceGetlago","SourceGithub","SourceGithubAuthentication","SourceGithubGithub","SourceGithubOptionTitle","SourceGithubPersonalAccessToken","SourceGitlab","SourceGitlabAuthType","SourceGitlabAuthorizationMethod","SourceGitlabGitlab","SourceGitlabOAuth20","SourceGitlabSchemasAuthType","SourceGlassfrog","SourceGnews","SourceGoldcast","SourceGoogleAds","SourceGoogleAdsGoogleAds","SourceGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIAndGroup","SourceGoogleAnalyticsDataAPIAuthType","SourceGoogleAnalyticsDataAPIBetweenFilter","SourceGoogleAnalyticsDataAPICredentials","SourceGoogleAnalyticsDataAPICustomReportConfig","SourceGoogleAnalyticsDataAPIDisabled","SourceGoogleAnalyticsDataAPIDoubleValue","SourceGoogleAnalyticsDataAPIEnabled","SourceGoogleAnalyticsDataAPIExpression","SourceGoogleAnalyticsDataAPIFilter","SourceGoogleAnalyticsDataAPIFilterName","SourceGoogleAnalyticsDataAPIFilterType","SourceGoogleAnalyticsDataAPIFromValue","SourceGoogleAnalyticsDataAPIGoogleAnalyticsDataAPI","SourceGoogleAnalyticsDataAPIGranularity","SourceGoogleAnalyticsDataAPIInListFilter","SourceGoogleAnalyticsDataAPIInt64Value","SourceGoogleAnalyticsDataAPINotExpression","SourceGoogleAnalyticsDataAPINumericFilter","SourceGoogleAnalyticsDataAPIOrGroup","SourceGoogleAnalyticsDataAPISchemasAuthType","SourceGoogleAnalyticsDataAPISchemasBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensi
onFilterDimensionsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Expres
sionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayEnabled","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilterType","SourceGoog
leAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilter3ValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ExpressionsValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFil
terMetricsFilter2ExpressionsValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter2ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3BetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3DoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ToValueValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Filter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3InListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Int64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3NumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3StringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFil
terMetricsFilter4FilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4ValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterBetweenFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterDoubleValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterExpression","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterName","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFilterType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterFromValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInListFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterInt64Value","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValueType","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayToValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValidEnums","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue","SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValueType","SourceGoogleAnalyticsDataAPISchemasDoubleValue","SourceGoogleAnalyticsDataAPISchemasEnabled","SourceGoogleAnalyticsDataAPISchemasExpression","SourceGoogleAnalyticsDataAPISchemasFilter","SourceGoogleAnalyticsDataAPISchemasFilterName","SourceGoogleAnalyticsDataAPISchemasFilterType","SourceGoogleAnalyticsDataAPISchemasFromValue","SourceGoogleAnalyticsDataAPISchemasInListFilter","SourceGoogleAnalyticsDataAPISchemasInt64Value","SourceGoogleAnalyticsDataAPISchemasNumericFilter","SourceGoogleAnalyticsDataAPISchemasStringFilter","SourceGoogleAnalyticsDataAPISchemasToValue","SourceGoogleAnalyticsDataAPISchemasValidEnums","SourceGoogleAnalyticsDataAPISchemasValue","SourceGoogleAnalyticsDataAPISchemasValueType","SourceGoogleAnalyticsDataAPIServiceAccountKeyAuthentication","SourceGoogleAnalyticsDataAPIStringFilter","SourceGoogleAnalyticsDataAPIToValue","SourceGoogleAnalyticsDataAPIValidEnums","SourceGoogleAnalyticsDataAPIValue","SourceGoogleAnalyticsDataAPIValueType","SourceGoogleDirectory","SourceGoogleDirectoryCredentialsTitle","SourceGoogleDirectoryGoogleCredentials","SourceGoogleDirectorySchemasCredentialsTitl
e","SourceGoogleDrive","SourceGoogleDriveAuthType","SourceGoogleDriveAuthenticateViaGoogleOAuth","SourceGoogleDriveAuthentication","SourceGoogleDriveAutogenerated","SourceGoogleDriveAvroFormat","SourceGoogleDriveCSVFormat","SourceGoogleDriveCSVHeaderDefinition","SourceGoogleDriveDocumentFileTypeFormatExperimental","SourceGoogleDriveFileBasedStreamConfig","SourceGoogleDriveFiletype","SourceGoogleDriveFormat","SourceGoogleDriveFromCSV","SourceGoogleDriveGoogleDrive","SourceGoogleDriveHeaderDefinitionType","SourceGoogleDriveJsonlFormat","SourceGoogleDriveLocal","SourceGoogleDriveMode","SourceGoogleDriveParquetFormat","SourceGoogleDriveParsingStrategy","SourceGoogleDriveProcessing","SourceGoogleDriveSchemasAuthType","SourceGoogleDriveSchemasFiletype","SourceGoogleDriveSchemasHeaderDefinitionType","SourceGoogleDriveSchemasStreamsFiletype","SourceGoogleDriveSchemasStreamsFormatFiletype","SourceGoogleDriveSchemasStreamsFormatFormatFiletype","SourceGoogleDriveSchemasStreamsHeaderDefinitionType","SourceGoogleDriveServiceAccountKeyAuthentication","SourceGoogleDriveUserProvided","SourceGoogleDriveValidationPolicy","SourceGooglePagespeedInsights","SourceGoogleSearchConsole","SourceGoogleSearchConsoleAuthType","SourceGoogleSearchConsoleCustomReportConfig","SourceGoogleSearchConsoleGoogleSearchConsole","SourceGoogleSearchConsoleOAuth","SourceGoogleSearchConsoleSchemasAuthType","SourceGoogleSearchConsoleServiceAccountKeyAuthentication","SourceGoogleSearchConsoleValidEnums","SourceGoogleSheets","SourceGoogleSheetsAuthType","SourceGoogleSheetsAuthenticateViaGoogleOAuth","SourceGoogleSheetsAuthentication","SourceGoogleSheetsGoogleSheets","SourceGoogleSheetsSchemasAuthType","SourceGoogleSheetsServiceAccountKeyAuthentication","SourceGoogleTasks","SourceGoogleWebfonts","SourceGreenhouse","SourceGridly","SourceGuru","SourceHardcodedRecords","SourceHarvest","SourceHarvestAuthType","SourceHarvestAuthenticateWithPersonalAccessToken","SourceHarvestAuthenticationMechanism","SourceHarvestSchemasAuthType","SourceHeight","SourceHibob","SourceHighLevel","SourceHubplanner","SourceHubspot","SourceHubspotAuthType","SourceHubspotAuthentication","SourceHubspotHubspot","SourceHubspotOAuth","SourceHubspotSchemasAuthType","SourceInsightly","SourceInstagram","SourceInstagramInstagram","SourceInstatus","SourceIntercom","SourceIntercomIntercom","SourceIp2whois","SourceIterable","SourceJira","SourceJotform","SourceJotformAPIEndpoint","SourceJotformSchemasAPIEndpoint","SourceK6Cloud","SourceKlarna","SourceKlarnaRegion","SourceKlaviyo","SourceKyve","SourceLaunchdarkly","SourceLeadfeeder","SourceLemlist","SourceLeverHiring","SourceLeverHiringAuthType","SourceLeverHiringAuthenticationMechanism","SourceLeverHiringEnvironment","SourceLeverHiringLeverHiring","SourceLeverHiringSchemasAuthType","SourceLinkedinAds","SourceLinkedinAdsAuthMethod","SourceLinkedinAdsAuthentication","SourceLinkedinAdsLinkedinAds","SourceLinkedinAdsOAuth20","SourceLinkedinAdsSchemasAuthMethod","SourceLinkedinPages","SourceLinkedinPagesAccessToken","SourceLinkedinPagesAuthMethod","SourceLinkedinPagesAuthentication","SourceLinkedinPagesOAuth20","SourceLinkedinPagesSchemasAuthMethod","SourceLinnworks","SourceLokalise","SourceLooker","SourceLuma","SourceMailchimp","SourceMailchimpAuthType","SourceMailchimpAuthentication","SourceMailchimpMailchimp","SourceMailchimpOAuth20","SourceMailchimpSchemasAuthType","SourceMailgun","SourceMailjetSms","SourceMarketo","SourceMetabase","SourceMicrosoftOnedrive","SourceMicrosoftOnedriveAuthType","SourceMicrosoftOnedriveAuthentication"
,"SourceMicrosoftOnedriveAutogenerated","SourceMicrosoftOnedriveAvroFormat","SourceMicrosoftOnedriveCSVFormat","SourceMicrosoftOnedriveCSVHeaderDefinition","SourceMicrosoftOnedriveFileBasedStreamConfig","SourceMicrosoftOnedriveFiletype","SourceMicrosoftOnedriveFormat","SourceMicrosoftOnedriveFromCSV","SourceMicrosoftOnedriveHeaderDefinitionType","SourceMicrosoftOnedriveJsonlFormat","SourceMicrosoftOnedriveLocal","SourceMicrosoftOnedriveMicrosoftOnedrive","SourceMicrosoftOnedriveMode","SourceMicrosoftOnedriveParquetFormat","SourceMicrosoftOnedriveParsingStrategy","SourceMicrosoftOnedriveProcessing","SourceMicrosoftOnedriveSchemasAuthType","SourceMicrosoftOnedriveSchemasFiletype","SourceMicrosoftOnedriveSchemasHeaderDefinitionType","SourceMicrosoftOnedriveSchemasStreamsFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsFormatFormatFiletype","SourceMicrosoftOnedriveSchemasStreamsHeaderDefinitionType","SourceMicrosoftOnedriveUnstructuredDocumentFormat","SourceMicrosoftOnedriveUserProvided","SourceMicrosoftOnedriveValidationPolicy","SourceMicrosoftSharepoint","SourceMicrosoftSharepointAuthType","SourceMicrosoftSharepointAuthenticateViaMicrosoftOAuth","SourceMicrosoftSharepointAuthentication","SourceMicrosoftSharepointAutogenerated","SourceMicrosoftSharepointAvroFormat","SourceMicrosoftSharepointCSVFormat","SourceMicrosoftSharepointCSVHeaderDefinition","SourceMicrosoftSharepointExcelFormat","SourceMicrosoftSharepointFileBasedStreamConfig","SourceMicrosoftSharepointFiletype","SourceMicrosoftSharepointFormat","SourceMicrosoftSharepointFromCSV","SourceMicrosoftSharepointHeaderDefinitionType","SourceMicrosoftSharepointJsonlFormat","SourceMicrosoftSharepointLocal","SourceMicrosoftSharepointMicrosoftSharepoint","SourceMicrosoftSharepointMode","SourceMicrosoftSharepointParquetFormat","SourceMicrosoftSharepointParsingStrategy","SourceMicrosoftSharepointProcessing","SourceMicrosoftSharepointSchemasAuthType","SourceMicrosoftSharepointSchemasFiletype","SourceMicrosoftSharepointSchemasHeaderDefinitionType","SourceMicrosoftSharepointSchemasStreamsFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFiletype","SourceMicrosoftSharepointSchemasStreamsFormatFormat6Filetype","SourceMicrosoftSharepointSchemasStreamsFormatFormatFiletype","SourceMicrosoftSharepointSchemasStreamsHeaderDefinitionType","SourceMicrosoftSharepointSearchScope","SourceMicrosoftSharepointServiceKeyAuthentication","SourceMicrosoftSharepointUnstructuredDocumentFormat","SourceMicrosoftSharepointUserProvided","SourceMicrosoftSharepointValidationPolicy","SourceMicrosoftTeams","SourceMicrosoftTeamsAuthType","SourceMicrosoftTeamsAuthenticationMechanism","SourceMicrosoftTeamsMicrosoftTeams","SourceMicrosoftTeamsSchemasAuthType","SourceMixpanel","SourceMixpanelOptionTitle","SourceMixpanelRegion","SourceMixpanelSchemasOptionTitle","SourceMonday","SourceMondayAuthType","SourceMondayAuthorizationMethod","SourceMondayMonday","SourceMondayOAuth20","SourceMondaySchemasAuthType","SourceMongodbV2","SourceMongodbV2ClusterType","SourceMongodbV2SchemasClusterType","SourceMssql","SourceMssqlEncryptedTrustServerCertificate","SourceMssqlEncryptedVerifyCertificate","SourceMssqlInvalidCDCPositionBehaviorAdvanced","SourceMssqlMethod","SourceMssqlMssql","SourceMssqlNoTunnel","SourceMssqlPasswordAuthentication","SourceMssqlSSHKeyAuthentication","SourceMssqlSSHTunnelMethod","SourceMssqlSSLMethod","SourceMssqlSchemasMethod","SourceMssqlSchemasSSLMethodSSLMethodSSLMethod","SourceMssqlSchemasSslMethod","SourceMssqlSchem
asSslMethodSslMethod","SourceMssqlSchemasTunnelMethod","SourceMssqlSchemasTunnelMethodTunnelMethod","SourceMssqlTunnelMethod","SourceMssqlUnencrypted","SourceMyHours","SourceMysql","SourceMysqlInvalidCDCPositionBehaviorAdvanced","SourceMysqlMethod","SourceMysqlMode","SourceMysqlMysql","SourceMysqlNoTunnel","SourceMysqlPasswordAuthentication","SourceMysqlSSHKeyAuthentication","SourceMysqlSSHTunnelMethod","SourceMysqlSSLModes","SourceMysqlScanChangesWithUserDefinedCursor","SourceMysqlSchemasMethod","SourceMysqlSchemasMode","SourceMysqlSchemasSSLModeSSLModesMode","SourceMysqlSchemasSslModeMode","SourceMysqlSchemasTunnelMethod","SourceMysqlSchemasTunnelMethodTunnelMethod","SourceMysqlTunnelMethod","SourceMysqlUpdateMethod","SourceMysqlVerifyCA","SourceNetsuite","SourceNorthpassLms","SourceNotion","SourceNotionAccessToken","SourceNotionAuthType","SourceNotionAuthenticationMethod","SourceNotionNotion","SourceNotionOAuth20","SourceNotionSchemasAuthType","SourceNylas","SourceNytimes","SourceOkta","SourceOktaAPIToken","SourceOktaAuthType","SourceOktaAuthorizationMethod","SourceOktaOAuth20","SourceOktaSchemasAuthType","SourceOktaSchemasCredentialsAuthType","SourceOmnisend","SourceOnesignal","SourceOracle","SourceOracleConnectionType","SourceOracleEncryption","SourceOracleEncryptionAlgorithm","SourceOracleEncryptionMethod","SourceOracleNativeNetworkEncryptionNNE","SourceOracleNoTunnel","SourceOracleOracle","SourceOraclePasswordAuthentication","SourceOracleSSHKeyAuthentication","SourceOracleSSHTunnelMethod","SourceOracleSchemasEncryptionEncryptionMethod","SourceOracleSchemasEncryptionMethod","SourceOracleSchemasTunnelMethod","SourceOracleSchemasTunnelMethodTunnelMethod","SourceOracleTLSEncryptedVerifyCertificate","SourceOracleTunnelMethod","SourceOracleUnencrypted","SourceOrb","SourceOrbit","SourceOutbrainAmplify","SourceOutbrainAmplifyAccessToken","SourceOutbrainAmplifyAuthenticationMethod","SourceOutbrainAmplifyUsernamePassword","SourceOutreach","SourcePatchRequest","SourcePaypalTransaction","SourcePaystack","SourcePendo","SourcePennylane","SourcePersistiq","SourcePexelsAPI","SourcePicqer","SourcePinterest","SourcePinterestAuthMethod","SourcePinterestLevel","SourcePinterestPinterest","SourcePinterestSchemasValidEnums","SourcePinterestValidEnums","SourcePipedrive","SourcePiwik","SourcePlanhat","SourcePocket","SourcePocketSortBy","SourcePokeapi","SourcePolygonStockAPI","SourcePostgres","SourcePostgresAllow","SourcePostgresDisable","SourcePostgresInvalidCDCPositionBehaviorAdvanced","SourcePostgresMethod","SourcePostgresMode","SourcePostgresNoTunnel","SourcePostgresPasswordAuthentication","SourcePostgresPostgres","SourcePostgresPrefer","SourcePostgresRequire","SourcePostgresSSHKeyAuthentication","SourcePostgresSSHTunnelMethod","SourcePostgresSSLModes","SourcePostgresScanChangesWithUserDefinedCursor","SourcePostgresSchemasMethod","SourcePostgresSchemasMode","SourcePostgresSchemasReplicationMethodMethod","SourcePostgresSchemasSSLModeSSLModes5Mode","SourcePostgresSchemasSSLModeSSLModes6Mode","SourcePostgresSchemasSSLModeSSLModesMode","SourcePostgresSchemasSslModeMode","SourcePostgresSchemasTunnelMethod","SourcePostgresSchemasTunnelMethodTunnelMethod","SourcePostgresTunnelMethod","SourcePostgresUpdateMethod","SourcePostgresVerifyCa","SourcePostgresVerifyFull","SourcePosthog","SourcePostmarkapp","SourcePrestashop","SourceProductboard","SourceProductive","SourcePutRequest","SourcePypi","SourceQualaroo","SourceRailz","SourceRecharge","SourceRecreation","SourceRecruitee","SourceRecurly","SourceReddit","SourceRed
shift","SourceRedshiftRedshift","SourceResponse","SourceRetently","SourceRetentlyAuthType","SourceRetentlyAuthenticationMechanism","SourceRetentlyRetently","SourceRetentlySchemasAuthType","SourceRkiCovid","SourceRss","SourceS3","SourceS3Autogenerated","SourceS3AvroFormat","SourceS3CSVFormat","SourceS3CSVHeaderDefinition","SourceS3FileBasedStreamConfig","SourceS3Filetype","SourceS3Format","SourceS3FromCSV","SourceS3HeaderDefinitionType","SourceS3JsonlFormat","SourceS3Local","SourceS3Mode","SourceS3ParquetFormat","SourceS3ParsingStrategy","SourceS3Processing","SourceS3S3","SourceS3SchemasFiletype","SourceS3SchemasHeaderDefinitionType","SourceS3SchemasStreamsFiletype","SourceS3SchemasStreamsFormatFiletype","SourceS3SchemasStreamsFormatFormatFiletype","SourceS3SchemasStreamsHeaderDefinitionType","SourceS3UnstructuredDocumentFormat","SourceS3UserProvided","SourceS3ValidationPolicy","SourceSalesforce","SourceSalesforceSalesforce","SourceSalesloft","SourceSalesloftAuthType","SourceSalesloftCredentials","SourceSalesloftSchemasAuthType","SourceSapFieldglass","SourceSavvycal","SourceScryfall","SourceSecoda","SourceSendgrid","SourceSendinblue","SourceSenseforce","SourceSentry","SourceSftp","SourceSftpAuthMethod","SourceSftpAuthentication","SourceSftpBulk","SourceSftpBulkAPIParameterConfigModel","SourceSftpBulkAuthType","SourceSftpBulkAuthentication","SourceSftpBulkAutogenerated","SourceSftpBulkAvroFormat","SourceSftpBulkCSVFormat","SourceSftpBulkCSVHeaderDefinition","SourceSftpBulkExcelFormat","SourceSftpBulkFileBasedStreamConfig","SourceSftpBulkFiletype","SourceSftpBulkFormat","SourceSftpBulkFromCSV","SourceSftpBulkHeaderDefinitionType","SourceSftpBulkJsonlFormat","SourceSftpBulkLocal","SourceSftpBulkMode","SourceSftpBulkParquetFormat","SourceSftpBulkParsingStrategy","SourceSftpBulkProcessing","SourceSftpBulkSchemasAuthType","SourceSftpBulkSchemasFiletype","SourceSftpBulkSchemasHeaderDefinitionType","SourceSftpBulkSchemasMode","SourceSftpBulkSchemasStreamsFiletype","SourceSftpBulkSchemasStreamsFormatFiletype","SourceSftpBulkSchemasStreamsFormatFormat6Filetype","SourceSftpBulkSchemasStreamsFormatFormatFiletype","SourceSftpBulkSchemasStreamsHeaderDefinitionType","SourceSftpBulkUnstructuredDocumentFormat","SourceSftpBulkUserProvided","SourceSftpBulkValidationPolicy","SourceSftpBulkViaAPI","SourceSftpPasswordAuthentication","SourceSftpSSHKeyAuthentication","SourceSftpSchemasAuthMethod","SourceShopify","SourceShopifyAuthMethod","SourceShopifyOAuth20","SourceShopifySchemasAuthMethod","SourceShopifyShopify","SourceShortcut","SourceShortio","SourceSlack","SourceSlackAPIToken","SourceSlackAuthenticationMechanism","SourceSlackOptionTitle","SourceSlackSchemasOptionTitle","SourceSlackSlack","SourceSmaily","SourceSmartengage","SourceSmartsheets","SourceSmartsheetsAuthType","SourceSmartsheetsAuthorizationMethod","SourceSmartsheetsOAuth20","SourceSmartsheetsSchemasAuthType","SourceSmartsheetsSmartsheets","SourceSnapchatMarketing","SourceSnapchatMarketingSnapchatMarketing","SourceSnowflake","SourceSnowflakeAuthType","SourceSnowflakeAuthorizationMethod","SourceSnowflakeKeyPairAuthentication","SourceSnowflakeOAuth20","SourceSnowflakeSchemasAuthType","SourceSnowflakeSchemasCredentialsAuthType","SourceSnowflakeSnowflake","SourceSnowflakeUsernameAndPassword","SourceSonarCloud","SourceSpacexAPI","SourceSquare","SourceSquareAPIKey","SourceSquareAuthType","SourceSquareAuthentication","SourceSquareSchemasAuthType","SourceSquareSquare","SourceStrava","SourceStravaAuthType","SourceStripe","SourceSurveySparrow","SourceSurveySpa
rrowURLBase","SourceSurveymonkey","SourceSurveymonkeyAuthMethod","SourceSurveymonkeySurveymonkey","SourceSurvicate","SourceTeamwork","SourceTempo","SourceTheGuardianAPI","SourceTiktokMarketing","SourceTiktokMarketingAuthType","SourceTiktokMarketingAuthenticationMethod","SourceTiktokMarketingOAuth20","SourceTiktokMarketingSchemasAuthType","SourceTiktokMarketingTiktokMarketing","SourceTrello","SourceTrustpilot","SourceTrustpilotAPIKey","SourceTrustpilotAuthType","SourceTrustpilotAuthorizationMethod","SourceTrustpilotOAuth20","SourceTrustpilotSchemasAuthType","SourceTvmazeSchedule","SourceTwilio","SourceTwilioTaskrouter","SourceTwitter","SourceTypeform","SourceTypeformAuthType","SourceTypeformAuthorizationMethod","SourceTypeformOAuth20","SourceTypeformPrivateToken","SourceTypeformSchemasAuthType","SourceTypeformTypeform","SourceUsCensus","SourceVantage","SourceWebflow","SourceWhenIWork","SourceWhiskyHunter","SourceWikipediaPageviews","SourceWoocommerce","SourceXkcd","SourceYandexMetrica","SourceYotpo","SourceYoutubeAnalytics","SourceYoutubeAnalyticsYoutubeAnalytics","SourceZendeskChat","SourceZendeskChatAccessToken","SourceZendeskChatAuthorizationMethod","SourceZendeskChatCredentials","SourceZendeskChatOAuth20","SourceZendeskChatSchemasCredentials","SourceZendeskChatZendeskChat","SourceZendeskSunshine","SourceZendeskSunshineAPIToken","SourceZendeskSunshineAuthMethod","SourceZendeskSunshineAuthorizationMethod","SourceZendeskSunshineOAuth20","SourceZendeskSunshineSchemasAuthMethod","SourceZendeskSunshineZendeskSunshine","SourceZendeskSupport","SourceZendeskSupportAPIToken","SourceZendeskSupportAuthentication","SourceZendeskSupportCredentials","SourceZendeskSupportOAuth20","SourceZendeskSupportSchemasCredentials","SourceZendeskSupportZendeskSupport","SourceZendeskTalk","SourceZendeskTalkAPIToken","SourceZendeskTalkAuthType","SourceZendeskTalkAuthentication","SourceZendeskTalkOAuth20","SourceZendeskTalkSchemasAuthType","SourceZendeskTalkZendeskTalk","SourceZenloop","SourceZohoCrm","SourceZohoCrmEnvironment","SourceZoom","SourcesResponse","SpacexAPI","Square","SquareCredentials","StandaloneMongoDbInstance","State","StateFilter","Status","Storage","StorageProvider","Strategies","Strava","StreamConfiguration","StreamConfigurations","StreamProperties","StreamsCriteria","StringFilter","Stripe","SurveyMonkeyAuthorizationMethod","SurveySparrow","Surveymonkey","SurveymonkeyCredentials","Survicate","SwipeUpAttributionWindow","SystemIDSID","TLSEncryptedVerifyCertificate","TargetsType","Teamwork","Tempo","Teradata","TextSplitter","TheGuardianAPI","TiktokMarketing","TiktokMarketingCredentials","TimeGranularity","TimeGranularityType","Timeplus","ToValue","TopHeadlinesTopic","TransformationQueryRunType","Trello","Trustpilot","TunnelMethod","TvmazeSchedule","Twilio","TwilioTaskrouter","Twitter","Typeform","TypeformCredentials","Typesense","URLBase","Unencrypted","UnitOfMeasure","UnstructuredDocumentFormat","UpdateMethod","UploadingMethod","UsCensus","UserProvided","UserResponse","UsernameAndPassword","UsernamePassword","UsersResponse","ValidActionBreakdowns","ValidAdSetStatuses","ValidAdStatuses","ValidBreakdowns","ValidCampaignStatuses","ValidationPolicy","Validenums","Value","ValueType","Vantage","Vectara","VerifyCa","VerifyFull","VerifyIdentity","ViaAPI","ViewAttributionWindow","ViewWindowDays","Weaviate","Webflow","WhenIWork","WhiskyHunter","WikipediaPageviews","Woocommerce","WorkspaceCreateRequest","WorkspaceOAuthCredentialsRequest","WorkspaceResponse","WorkspaceUpdateRequest","WorkspacesResponse","Xkcd","X
z","YandexMetrica","Yellowbrick","Yotpo","YoutubeAnalytics","YoutubeAnalyticsCredentials","ZendeskChat","ZendeskChatCredentials","ZendeskSunshine","ZendeskSunshineCredentials","ZendeskSupport","ZendeskSupportCredentials","ZendeskTalk","ZendeskTalkCredentials","Zenloop","ZohoCRMEdition","ZohoCrm","Zoom","Zstandard"] diff --git a/src/airbyte_api/models/destination_clickhouse.py b/src/airbyte_api/models/destination_clickhouse.py index 07a405b4..e9c5a522 100644 --- a/src/airbyte_api/models/destination_clickhouse.py +++ b/src/airbyte_api/models/destination_clickhouse.py @@ -88,6 +88,8 @@ class DestinationClickhouse: r"""HTTP port of the database.""" raw_data_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('raw_data_schema'), 'exclude': lambda f: f is None }}) r"""The schema to write raw tables into (default: airbyte_internal)""" + ssl: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }}) + r"""Encrypt data using SSL.""" tunnel_method: Optional[SSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" diff --git a/src/airbyte_api/models/destination_dev_null.py b/src/airbyte_api/models/destination_dev_null.py deleted file mode 100644 index 909bdffe..00000000 --- a/src/airbyte_api/models/destination_dev_null.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Final, Optional, Union - - -class DevNull(str, Enum): - DEV_NULL = 'dev-null' - - -class TestDestinationType(str, Enum): - SILENT = 'SILENT' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Silent: - TEST_DESTINATION_TYPE: Final[Optional[TestDestinationType]] = dataclasses.field(default=TestDestinationType.SILENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('test_destination_type'), 'exclude': lambda f: f is None }}) - - - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DestinationDevNull: - test_destination: TestDestination = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('test_destination') }}) - r"""The type of destination to be used""" - DESTINATION_TYPE: Final[DevNull] = dataclasses.field(default=DevNull.DEV_NULL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }}) - - - -TestDestination = Union[Silent] diff --git a/src/airbyte_api/models/destination_elasticsearch.py b/src/airbyte_api/models/destination_elasticsearch.py index 168042d5..8225280e 100644 --- a/src/airbyte_api/models/destination_elasticsearch.py +++ b/src/airbyte_api/models/destination_elasticsearch.py @@ -8,7 +8,7 @@ from typing import Final, Optional, Union -class DestinationElasticsearchSchemasMethod(str, Enum): +class DestinationElasticsearchSchemasAuthenticationMethodMethod(str, Enum): BASIC = 'basic' @@ -20,12 +20,12 @@ class UsernamePassword: r"""Basic auth password to access a secure Elasticsearch server""" username: str = 
diff --git a/src/airbyte_api/models/destination_dev_null.py b/src/airbyte_api/models/destination_dev_null.py
deleted file mode 100644
index 909bdffe..00000000
--- a/src/airbyte_api/models/destination_dev_null.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
-
-from __future__ import annotations
-import dataclasses
-from airbyte_api import utils
-from dataclasses_json import Undefined, dataclass_json
-from enum import Enum
-from typing import Final, Optional, Union
-
-
-class DevNull(str, Enum):
-    DEV_NULL = 'dev-null'
-
-
-class TestDestinationType(str, Enum):
-    SILENT = 'SILENT'
-
-
-@dataclass_json(undefined=Undefined.EXCLUDE)
-@dataclasses.dataclass
-class Silent:
-    TEST_DESTINATION_TYPE: Final[Optional[TestDestinationType]] = dataclasses.field(default=TestDestinationType.SILENT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('test_destination_type'), 'exclude': lambda f: f is None }})
-
-
-
-@dataclass_json(undefined=Undefined.EXCLUDE)
-@dataclasses.dataclass
-class DestinationDevNull:
-    test_destination: TestDestination = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('test_destination') }})
-    r"""The type of destination to be used"""
-    DESTINATION_TYPE: Final[DevNull] = dataclasses.field(default=DevNull.DEV_NULL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
-
-
-TestDestination = Union[Silent]
diff --git a/src/airbyte_api/models/destination_elasticsearch.py b/src/airbyte_api/models/destination_elasticsearch.py
index 168042d5..8225280e 100644
--- a/src/airbyte_api/models/destination_elasticsearch.py
+++ b/src/airbyte_api/models/destination_elasticsearch.py
@@ -8,7 +8,7 @@
 from typing import Final, Optional, Union


-class DestinationElasticsearchSchemasMethod(str, Enum):
+class DestinationElasticsearchSchemasAuthenticationMethodMethod(str, Enum):
     BASIC = 'basic'


@@ -20,12 +20,12 @@ class UsernamePassword:
     r"""Basic auth password to access a secure Elasticsearch server"""
     username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
     r"""Basic auth username to access a secure Elasticsearch server"""
-    METHOD: Final[DestinationElasticsearchSchemasMethod] = dataclasses.field(default=DestinationElasticsearchSchemasMethod.BASIC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+    METHOD: Final[DestinationElasticsearchSchemasAuthenticationMethodMethod] = dataclasses.field(default=DestinationElasticsearchSchemasAuthenticationMethodMethod.BASIC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})



-class DestinationElasticsearchMethod(str, Enum):
+class DestinationElasticsearchSchemasMethod(str, Enum):
     SECRET = 'secret'


@@ -37,7 +37,20 @@ class APIKeySecret:
     r"""The Key ID to use when accessing an enterprise Elasticsearch instance."""
     api_key_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('apiKeySecret') }})
     r"""The secret associated with the API Key ID."""
-    METHOD: Final[DestinationElasticsearchMethod] = dataclasses.field(default=DestinationElasticsearchMethod.SECRET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+    METHOD: Final[DestinationElasticsearchSchemasMethod] = dataclasses.field(default=DestinationElasticsearchSchemasMethod.SECRET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})
+
+
+
+
+class DestinationElasticsearchMethod(str, Enum):
+    NONE = 'none'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class NoneT:
+    r"""No authentication will be used"""
+    METHOD: Final[DestinationElasticsearchMethod] = dataclasses.field(default=DestinationElasticsearchMethod.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('method') }})



@@ -46,6 +59,64 @@ class Elasticsearch(str, Enum):
     ELASTICSEARCH = 'elasticsearch'


+class DestinationElasticsearchSchemasTunnelMethodTunnelMethod(str, Enum):
+    r"""Connect through a jump server tunnel host using username and password authentication"""
+    SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationElasticsearchPasswordAuthentication:
+    tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
+    r"""Hostname of the jump server host that allows inbound ssh tunnel."""
+    tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
+    r"""OS-level username for logging into the jump server host"""
+    tunnel_user_password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user_password') }})
+    r"""OS-level password for logging into the jump server host"""
+    TUNNEL_METHOD: Final[DestinationElasticsearchSchemasTunnelMethodTunnelMethod] = dataclasses.field(default=DestinationElasticsearchSchemasTunnelMethodTunnelMethod.SSH_PASSWORD_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+    r"""Connect through a jump server tunnel host using username and password authentication"""
+    tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }})
+    r"""Port on the proxy/jump server that accepts inbound ssh connections."""
+
+
+
+
+class DestinationElasticsearchSchemasTunnelMethod(str, Enum):
+    r"""Connect through a jump server tunnel host using username and ssh key"""
+    SSH_KEY_AUTH = 'SSH_KEY_AUTH'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationElasticsearchSSHKeyAuthentication:
+    ssh_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssh_key') }})
+    r"""OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )"""
+    tunnel_host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_host') }})
+    r"""Hostname of the jump server host that allows inbound ssh tunnel."""
+    tunnel_user: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_user') }})
+    r"""OS-level username for logging into the jump server host."""
+    TUNNEL_METHOD: Final[DestinationElasticsearchSchemasTunnelMethod] = dataclasses.field(default=DestinationElasticsearchSchemasTunnelMethod.SSH_KEY_AUTH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+    r"""Connect through a jump server tunnel host using username and ssh key"""
+    tunnel_port: Optional[int] = dataclasses.field(default=22, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_port'), 'exclude': lambda f: f is None }})
+    r"""Port on the proxy/jump server that accepts inbound ssh connections."""
+
+
+
+
+class DestinationElasticsearchTunnelMethod(str, Enum):
+    r"""No ssh tunnel needed to connect to database"""
+    NO_TUNNEL = 'NO_TUNNEL'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationElasticsearchNoTunnel:
+    TUNNEL_METHOD: Final[DestinationElasticsearchTunnelMethod] = dataclasses.field(default=DestinationElasticsearchTunnelMethod.NO_TUNNEL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method') }})
+    r"""No ssh tunnel needed to connect to database"""
+
+
+
+
 @dataclass_json(undefined=Undefined.EXCLUDE)
 @dataclasses.dataclass
 class DestinationElasticsearch:
@@ -56,9 +127,13 @@ class DestinationElasticsearch:
     ca_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ca_certificate'), 'exclude': lambda f: f is None }})
     r"""CA certificate"""
     DESTINATION_TYPE: Final[Elasticsearch] = dataclasses.field(default=Elasticsearch.ELASTICSEARCH, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+    tunnel_method: Optional[DestinationElasticsearchSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }})
+    r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
     upsert: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('upsert'), 'exclude': lambda f: f is None }})
     r"""If a primary key identifier is defined in the source, an upsert will be performed using the primary key value as the elasticsearch doc id. Does not support composite primary keys."""


-AuthenticationMethod = Union[APIKeySecret, UsernamePassword]
+AuthenticationMethod = Union[NoneT, APIKeySecret, UsernamePassword]
+
+DestinationElasticsearchSSHTunnelMethod = Union[DestinationElasticsearchNoTunnel, DestinationElasticsearchSSHKeyAuthentication, DestinationElasticsearchPasswordAuthentication]
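A sketch (not part of the patch) of the two additions together: the new NoneT member of AuthenticationMethod and the new SSH tunnel union. The `endpoint` and `authentication_method` field names are assumed from the rest of the generated model, which these hunks do not show:

from airbyte_api import models

es_config = models.DestinationElasticsearch(
    endpoint='https://elasticsearch.internal:9200',  # assumed required field
    authentication_method=models.NoneT(),            # new: connect without authentication
    tunnel_method=models.DestinationElasticsearchSSHKeyAuthentication(
        ssh_key='-----BEGIN RSA PRIVATE KEY-----\n...',
        tunnel_host='jump.example.com',
        tunnel_user='airbyte',
        # tunnel_port defaults to 22
    ),
)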
diff --git a/src/airbyte_api/models/destination_mongodb.py b/src/airbyte_api/models/destination_mongodb.py
index ed563ebc..7da1b7de 100644
--- a/src/airbyte_api/models/destination_mongodb.py
+++ b/src/airbyte_api/models/destination_mongodb.py
@@ -31,7 +31,7 @@ class DestinationMongodbSchemasAuthorization(str, Enum):
 @dataclass_json(undefined=Undefined.EXCLUDE)
 @dataclasses.dataclass
-class NoneT:
+class DestinationMongodbNone:
     r"""None."""
     AUTHORIZATION: Final[DestinationMongodbSchemasAuthorization] = dataclasses.field(default=DestinationMongodbSchemasAuthorization.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }})

@@ -84,6 +84,8 @@ class StandaloneMongoDbInstance:
     instance: Optional[Instance] = dataclasses.field(default=Instance.STANDALONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('instance'), 'exclude': lambda f: f is None }})
     port: Optional[int] = dataclasses.field(default=27017, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }})
     r"""The Port of a Mongo database to be replicated."""
+    tls: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tls'), 'exclude': lambda f: f is None }})
+    r"""Indicates whether TLS encryption protocol will be used to connect to MongoDB. It is recommended to use TLS connection if possible. For more information see documentation."""


@@ -161,7 +163,7 @@ class DestinationMongodb:



-AuthorizationType = Union[NoneT, LoginPassword]
+AuthorizationType = Union[DestinationMongodbNone, LoginPassword]

 MongoDbInstanceType = Union[StandaloneMongoDbInstance, ReplicaSet, MongoDBAtlas]
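A sketch (not part of the patch) of the new `tls` flag on a standalone MongoDB instance; connection fields other than `instance`, `port`, and `tls` (e.g. `host`) are assumed from the rest of the generated model:

from airbyte_api import models

mongo_instance = models.StandaloneMongoDbInstance(
    host='mongodb.internal',  # assumed field, not shown in this hunk
    port=27017,
    tls=True,                 # new in this patch; defaults to False
)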
diff --git a/src/airbyte_api/models/destination_mssql.py b/src/airbyte_api/models/destination_mssql.py
index e50ed104..7a2896b9 100644
--- a/src/airbyte_api/models/destination_mssql.py
+++ b/src/airbyte_api/models/destination_mssql.py
@@ -12,7 +12,7 @@ class Mssql(str, Enum):
     MSSQL = 'mssql'


-class DestinationMssqlSchemasSslMethod(str, Enum):
+class DestinationMssqlSchemasSslMethodSslMethod(str, Enum):
     ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate'


@@ -22,12 +22,12 @@ class EncryptedVerifyCertificate:
     r"""Verify and use the certificate provided by the server."""
     host_name_in_certificate: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hostNameInCertificate'), 'exclude': lambda f: f is None }})
     r"""Specifies the host name of the server. The value of this property must match the subject property of the certificate."""
-    SSL_METHOD: Final[Optional[DestinationMssqlSchemasSslMethod]] = dataclasses.field(default=DestinationMssqlSchemasSslMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }})
+    SSL_METHOD: Final[Optional[DestinationMssqlSchemasSslMethodSslMethod]] = dataclasses.field(default=DestinationMssqlSchemasSslMethodSslMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }})



-class DestinationMssqlSslMethod(str, Enum):
+class DestinationMssqlSchemasSslMethod(str, Enum):
     ENCRYPTED_TRUST_SERVER_CERTIFICATE = 'encrypted_trust_server_certificate'


@@ -35,7 +35,20 @@ class DestinationMssqlSslMethod(str, Enum):
 @dataclasses.dataclass
 class EncryptedTrustServerCertificate:
     r"""Use the certificate provided by the server without verification. (For testing purposes only!)"""
-    SSL_METHOD: Final[Optional[DestinationMssqlSslMethod]] = dataclasses.field(default=DestinationMssqlSslMethod.ENCRYPTED_TRUST_SERVER_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }})
+    SSL_METHOD: Final[Optional[DestinationMssqlSchemasSslMethod]] = dataclasses.field(default=DestinationMssqlSchemasSslMethod.ENCRYPTED_TRUST_SERVER_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }})
+
+
+
+
+class DestinationMssqlSslMethod(str, Enum):
+    UNENCRYPTED = 'unencrypted'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class Unencrypted:
+    r"""The data transfer will not be encrypted."""
+    SSL_METHOD: Final[Optional[DestinationMssqlSslMethod]] = dataclasses.field(default=DestinationMssqlSslMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method'), 'exclude': lambda f: f is None }})



@@ -125,6 +138,6 @@ class DestinationMssql:



-SSLMethod = Union[EncryptedTrustServerCertificate, EncryptedVerifyCertificate]
+SSLMethod = Union[Unencrypted, EncryptedTrustServerCertificate, EncryptedVerifyCertificate]

 DestinationMssqlSSHTunnelMethod = Union[DestinationMssqlNoTunnel, DestinationMssqlSSHKeyAuthentication, DestinationMssqlPasswordAuthentication]
diff --git a/src/airbyte_api/models/destination_mysql.py b/src/airbyte_api/models/destination_mysql.py
index 55eea757..3bf6f50c 100644
--- a/src/airbyte_api/models/destination_mysql.py
+++ b/src/airbyte_api/models/destination_mysql.py
@@ -90,6 +90,8 @@ class DestinationMysql:
     r"""Port of the database."""
     raw_data_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('raw_data_schema'), 'exclude': lambda f: f is None }})
     r"""The database to write raw tables into"""
+    ssl: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }})
+    r"""Encrypt data using SSL."""
     tunnel_method: Optional[DestinationMysqlSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }})
     r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use."""
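With the new Unencrypted member, the SSLMethod union for MSSQL now covers all three transport modes. A sketch (not part of the patch):

from airbyte_api import models

# For local testing only; EncryptedTrustServerCertificate and
# EncryptedVerifyCertificate remain the options for encrypted transport.
ssl_method: models.SSLMethod = models.Unencrypted()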
diff --git a/src/airbyte_api/models/destination_oracle.py b/src/airbyte_api/models/destination_oracle.py
index cae87801..37d5460e 100644
--- a/src/airbyte_api/models/destination_oracle.py
+++ b/src/airbyte_api/models/destination_oracle.py
@@ -12,6 +12,56 @@ class Oracle(str, Enum):
     ORACLE = 'oracle'


+class DestinationOracleSchemasEncryptionMethod(str, Enum):
+    ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class TLSEncryptedVerifyCertificate:
+    r"""Verify and use the certificate provided by the server."""
+    ssl_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_certificate') }})
+    r"""Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations."""
+    ENCRYPTION_METHOD: Final[Optional[DestinationOracleSchemasEncryptionMethod]] = dataclasses.field(default=DestinationOracleSchemasEncryptionMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }})
+
+
+
+
+class EncryptionAlgorithm(str, Enum):
+    r"""This parameter defines the database encryption algorithm."""
+    AES256 = 'AES256'
+    RC4_56 = 'RC4_56'
+    THREE_DES168 = '3DES168'
+
+
+class DestinationOracleEncryptionMethod(str, Enum):
+    CLIENT_NNE = 'client_nne'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class NativeNetworkEncryptionNNE:
+    r"""The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports."""
+    encryption_algorithm: Optional[EncryptionAlgorithm] = dataclasses.field(default=EncryptionAlgorithm.AES256, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_algorithm'), 'exclude': lambda f: f is None }})
+    r"""This parameter defines the database encryption algorithm."""
+    ENCRYPTION_METHOD: Final[Optional[DestinationOracleEncryptionMethod]] = dataclasses.field(default=DestinationOracleEncryptionMethod.CLIENT_NNE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }})
+
+
+
+
+class EncryptionMethod(str, Enum):
+    UNENCRYPTED = 'unencrypted'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationOracleUnencrypted:
+    r"""Data transfer will not be encrypted."""
+    ENCRYPTION_METHOD: Final[Optional[EncryptionMethod]] = dataclasses.field(default=EncryptionMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method'), 'exclude': lambda f: f is None }})
+
+
+
+
 class DestinationOracleSchemasTunnelMethodTunnelMethod(str, Enum):
     r"""Connect through a jump server tunnel host using username and password authentication"""
     SSH_PASSWORD_AUTH = 'SSH_PASSWORD_AUTH'
@@ -80,6 +130,8 @@ class DestinationOracle:
     username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
     r"""The username to access the database. This user must have CREATE USER privileges in the database."""
     DESTINATION_TYPE: Final[Oracle] = dataclasses.field(default=Oracle.ORACLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+    encryption: Optional[Encryption] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption'), 'exclude': lambda f: f is None }})
+    r"""The encryption method which is used when communicating with the database."""
     jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }})
     r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)."""
     password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }})
@@ -95,4 +147,6 @@ class DestinationOracle:



+Encryption = Union[DestinationOracleUnencrypted, NativeNetworkEncryptionNNE, TLSEncryptedVerifyCertificate]
+
 DestinationOracleSSHTunnelMethod = Union[DestinationOracleNoTunnel, DestinationOracleSSHKeyAuthentication, DestinationOraclePasswordAuthentication]
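A sketch (not part of the patch) of choosing one of the three new Encryption union members for an Oracle destination:

from airbyte_api import models

# Native network encryption, overriding the AES256 default algorithm.
encryption: models.Encryption = models.NativeNetworkEncryptionNNE(
    encryption_algorithm=models.EncryptionAlgorithm.THREE_DES168,
)
# Alternatively: models.DestinationOracleUnencrypted(), or
# models.TLSEncryptedVerifyCertificate(ssl_certificate='-----BEGIN CERTIFICATE-----\n...')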
diff --git a/src/airbyte_api/models/destination_pgvector.py b/src/airbyte_api/models/destination_pgvector.py
new file mode 100644
index 00000000..aa39a498
--- /dev/null
+++ b/src/airbyte_api/models/destination_pgvector.py
@@ -0,0 +1,247 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+import dataclasses
+from airbyte_api import utils
+from dataclasses_json import Undefined, dataclass_json
+from enum import Enum
+from typing import Final, List, Optional, Union
+
+
+class Pgvector(str, Enum):
+    PGVECTOR = 'pgvector'
+
+
+class DestinationPgvectorSchemasEmbeddingEmbedding5Mode(str, Enum):
+    OPENAI_COMPATIBLE = 'openai_compatible'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorOpenAICompatible:
+    r"""Use a service that's compatible with the OpenAI API to embed text."""
+    base_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('base_url') }})
+    r"""The base URL for your OpenAI-compatible service"""
+    dimensions: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dimensions') }})
+    r"""The number of dimensions the embedding model is generating"""
+    api_key: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key'), 'exclude': lambda f: f is None }})
+    MODE: Final[Optional[DestinationPgvectorSchemasEmbeddingEmbedding5Mode]] = dataclasses.field(default=DestinationPgvectorSchemasEmbeddingEmbedding5Mode.OPENAI_COMPATIBLE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+    model_name: Optional[str] = dataclasses.field(default='text-embedding-ada-002', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('model_name'), 'exclude': lambda f: f is None }})
+    r"""The name of the model to use for embedding"""
+
+
+
+
+class DestinationPgvectorSchemasEmbeddingEmbeddingMode(str, Enum):
+    AZURE_OPENAI = 'azure_openai'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorAzureOpenAI:
+    r"""Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions."""
+    api_base: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_base') }})
+    r"""The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource"""
+    deployment: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('deployment') }})
+    r"""The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource"""
+    openai_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('openai_key') }})
+    r"""The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource"""
+    MODE: Final[Optional[DestinationPgvectorSchemasEmbeddingEmbeddingMode]] = dataclasses.field(default=DestinationPgvectorSchemasEmbeddingEmbeddingMode.AZURE_OPENAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+
+
+
+
+class DestinationPgvectorSchemasEmbeddingMode(str, Enum):
+    FAKE = 'fake'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorFake:
+    r"""Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs."""
+    MODE: Final[Optional[DestinationPgvectorSchemasEmbeddingMode]] = dataclasses.field(default=DestinationPgvectorSchemasEmbeddingMode.FAKE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+
+
+
+
+class DestinationPgvectorSchemasMode(str, Enum):
+    COHERE = 'cohere'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorCohere:
+    r"""Use the Cohere API to embed text."""
+    cohere_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('cohere_key') }})
+    MODE: Final[Optional[DestinationPgvectorSchemasMode]] = dataclasses.field(default=DestinationPgvectorSchemasMode.COHERE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+
+
+
+
+class DestinationPgvectorMode(str, Enum):
+    OPENAI = 'openai'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorOpenAI:
+    r"""Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions."""
+    openai_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('openai_key') }})
+    MODE: Final[Optional[DestinationPgvectorMode]] = dataclasses.field(default=DestinationPgvectorMode.OPENAI, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+
+
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorCredentials:
+    password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }})
+    r"""Enter the password you want to use to access the database"""
+
+
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class PostgresConnection:
+    r"""Postgres can be used to store vector data and retrieve embeddings."""
+    credentials: DestinationPgvectorCredentials = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }})
+    database: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('database') }})
+    r"""Enter the name of the database that you want to sync data into"""
+    default_schema: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('default_schema') }})
+    r"""Enter the name of the default schema"""
+    host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }})
+    r"""Enter the host name you want to use to access the database."""
+    port: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port') }})
+    r"""Enter the port you want to use to access the database"""
+    username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }})
+    r"""Enter the name of the user you want to use to access the database"""
+
+
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorFieldNameMappingConfigModel:
+    from_field: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('from_field') }})
+    r"""The field name in the source"""
+    to_field: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('to_field') }})
+    r"""The field name to use in the destination"""
+
+
+
+
+class DestinationPgvectorLanguage(str, Enum):
+    r"""Split code in suitable places based on the programming language"""
+    CPP = 'cpp'
+    GO = 'go'
+    JAVA = 'java'
+    JS = 'js'
+    PHP = 'php'
+    PROTO = 'proto'
+    PYTHON = 'python'
+    RST = 'rst'
+    RUBY = 'ruby'
+    RUST = 'rust'
+    SCALA = 'scala'
+    SWIFT = 'swift'
+    MARKDOWN = 'markdown'
+    LATEX = 'latex'
+    HTML = 'html'
+    SOL = 'sol'
+
+
+class DestinationPgvectorSchemasProcessingTextSplitterTextSplitterMode(str, Enum):
+    CODE = 'code'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorByProgrammingLanguage:
+    r"""Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks."""
+    language: DestinationPgvectorLanguage = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('language') }})
+    r"""Split code in suitable places based on the programming language"""
+    MODE: Final[Optional[DestinationPgvectorSchemasProcessingTextSplitterTextSplitterMode]] = dataclasses.field(default=DestinationPgvectorSchemasProcessingTextSplitterTextSplitterMode.CODE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+
+
+
+
+class DestinationPgvectorSchemasProcessingTextSplitterMode(str, Enum):
+    MARKDOWN = 'markdown'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorByMarkdownHeader:
+    r"""Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk."""
+    MODE: Final[Optional[DestinationPgvectorSchemasProcessingTextSplitterMode]] = dataclasses.field(default=DestinationPgvectorSchemasProcessingTextSplitterMode.MARKDOWN, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+    split_level: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('split_level'), 'exclude': lambda f: f is None }})
+    r"""Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points"""
+
+
+
+
+class DestinationPgvectorSchemasProcessingMode(str, Enum):
+    SEPARATOR = 'separator'
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorBySeparator:
+    r"""Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc."""
+    keep_separator: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('keep_separator'), 'exclude': lambda f: f is None }})
+    r"""Whether to keep the separator in the resulting chunks"""
+    MODE: Final[Optional[DestinationPgvectorSchemasProcessingMode]] = dataclasses.field(default=DestinationPgvectorSchemasProcessingMode.SEPARATOR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mode'), 'exclude': lambda f: f is None }})
+    separators: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('separators'), 'exclude': lambda f: f is None }})
+    r"""List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use \\".\\". To split by a newline, use \\"\n\\"."""
+
+
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvectorProcessingConfigModel:
+    chunk_size: int = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('chunk_size') }})
+    r"""Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)"""
+    chunk_overlap: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('chunk_overlap'), 'exclude': lambda f: f is None }})
+    r"""Size of overlap between chunks in tokens to store in vector store to better capture relevant context"""
+    field_name_mappings: Optional[List[DestinationPgvectorFieldNameMappingConfigModel]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('field_name_mappings'), 'exclude': lambda f: f is None }})
+    r"""List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation."""
+    metadata_fields: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('metadata_fields'), 'exclude': lambda f: f is None }})
+    r"""List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path."""
+    text_fields: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('text_fields'), 'exclude': lambda f: f is None }})
+    r"""List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array."""
+    text_splitter: Optional[DestinationPgvectorTextSplitter] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('text_splitter'), 'exclude': lambda f: f is None }})
+    r"""Split text fields into chunks based on the specified method."""
+
+
+
+
+@dataclass_json(undefined=Undefined.EXCLUDE)
+@dataclasses.dataclass
+class DestinationPgvector:
+    r"""The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration,
+    as well as to provide type safety for the configuration passed to the destination.
+
+    The configuration model is composed of four parts:
+    * Processing configuration
+    * Embedding configuration
+    * Indexing configuration
+    * Advanced configuration
+
+    Processing, embedding and advanced configuration are provided by this base class, while the indexing configuration is provided by the destination connector in the sub class.
+    """
+    embedding: DestinationPgvectorEmbedding = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('embedding') }})
+    r"""Embedding configuration"""
+    indexing: PostgresConnection = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('indexing') }})
+    r"""Postgres can be used to store vector data and retrieve embeddings."""
+    processing: DestinationPgvectorProcessingConfigModel = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing') }})
+    DESTINATION_TYPE: Final[Pgvector] = dataclasses.field(default=Pgvector.PGVECTOR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('destinationType') }})
+    omit_raw_text: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('omit_raw_text'), 'exclude': lambda f: f is None }})
+    r"""Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source."""
+
+
+
+DestinationPgvectorEmbedding = Union[DestinationPgvectorOpenAI, DestinationPgvectorCohere, DestinationPgvectorFake, DestinationPgvectorAzureOpenAI, DestinationPgvectorOpenAICompatible]
+
+DestinationPgvectorTextSplitter = Union[DestinationPgvectorBySeparator, DestinationPgvectorByMarkdownHeader, DestinationPgvectorByProgrammingLanguage]
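A sketch (not part of the patch) of a complete DestinationPgvector configuration built from the classes above, using the cost-free fake embedding and the Markdown splitter:

from airbyte_api import models

pgvector_config = models.DestinationPgvector(
    embedding=models.DestinationPgvectorFake(),  # random 1536-dim vectors, useful for testing
    indexing=models.PostgresConnection(
        credentials=models.DestinationPgvectorCredentials(password='...'),
        database='vectors',
        default_schema='public',
        host='postgres.internal',
        port=5432,
        username='airbyte',
    ),
    processing=models.DestinationPgvectorProcessingConfigModel(
        chunk_size=512,    # tokens per chunk
        chunk_overlap=64,  # token overlap between chunks
        text_splitter=models.DestinationPgvectorByMarkdownHeader(split_level=2),
    ),
)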
disable - Choose this mode to disable encryption of communication between Airbyte and destination database diff --git a/src/airbyte_api/models/destinationconfiguration.py b/src/airbyte_api/models/destinationconfiguration.py index c5e0384e..569ecf16 100644 --- a/src/airbyte_api/models/destinationconfiguration.py +++ b/src/airbyte_api/models/destinationconfiguration.py @@ -8,7 +8,6 @@ from .destination_clickhouse import DestinationClickhouse from .destination_convex import DestinationConvex from .destination_databricks import DestinationDatabricks -from .destination_dev_null import DestinationDevNull from .destination_duckdb import DestinationDuckdb from .destination_dynamodb import DestinationDynamodb from .destination_elasticsearch import DestinationElasticsearch @@ -21,6 +20,7 @@ from .destination_mssql import DestinationMssql from .destination_mysql import DestinationMysql from .destination_oracle import DestinationOracle +from .destination_pgvector import DestinationPgvector from .destination_pinecone import DestinationPinecone from .destination_postgres import DestinationPostgres from .destination_pubsub import DestinationPubsub @@ -40,4 +40,4 @@ from .destination_yellowbrick import DestinationYellowbrick from typing import Union -DestinationConfiguration = Union[DestinationGoogleSheets, DestinationAstra, DestinationAwsDatalake, DestinationAzureBlobStorage, DestinationBigquery, DestinationClickhouse, DestinationConvex, DestinationDatabricks, DestinationDevNull, DestinationDuckdb, DestinationDynamodb, DestinationElasticsearch, DestinationFirebolt, DestinationFirestore, DestinationGcs, DestinationMilvus, DestinationMongodb, DestinationMssql, DestinationMysql, DestinationOracle, DestinationPinecone, DestinationPostgres, DestinationPubsub, DestinationQdrant, DestinationRedis, DestinationRedshift, DestinationS3, DestinationS3Glue, DestinationSftpJSON, DestinationSnowflake, DestinationSnowflakeCortex, DestinationTeradata, DestinationTimeplus, DestinationTypesense, DestinationVectara, DestinationWeaviate, DestinationYellowbrick] +DestinationConfiguration = Union[DestinationGoogleSheets, DestinationAstra, DestinationAwsDatalake, DestinationAzureBlobStorage, DestinationBigquery, DestinationClickhouse, DestinationConvex, DestinationDatabricks, DestinationDuckdb, DestinationDynamodb, DestinationElasticsearch, DestinationFirebolt, DestinationFirestore, DestinationGcs, DestinationMilvus, DestinationMongodb, DestinationMssql, DestinationMysql, DestinationOracle, DestinationPgvector, DestinationPinecone, DestinationPostgres, DestinationPubsub, DestinationQdrant, DestinationRedis, DestinationRedshift, DestinationS3, DestinationS3Glue, DestinationSftpJSON, DestinationSnowflake, DestinationSnowflakeCortex, DestinationTeradata, DestinationTimeplus, DestinationTypesense, DestinationVectara, DestinationWeaviate, DestinationYellowbrick] diff --git a/src/airbyte_api/models/source_7shifts.py b/src/airbyte_api/models/source_7shifts.py new file mode 100644 index 00000000..edbc73d8 --- /dev/null +++ b/src/airbyte_api/models/source_7shifts.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
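# A small sketch: the rewritten DestinationConfiguration is a plain typing.Union,
# so any member model (here the pgvector config sketched above) type-checks as a
# valid destination configuration without further wrapping.
config: DestinationConfiguration = destination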
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Sevenshifts(str, Enum): + SEVENSHIFTS = '7shifts' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Source7shifts: + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""Access token to use for authentication. Generate it in the 7shifts Developer Tools.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Sevenshifts] = dataclasses.field(default=Sevenshifts.SEVENSHIFTS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_amazon_seller_partner.py b/src/airbyte_api/models/source_amazon_seller_partner.py index 8fdb8845..084e719b 100644 --- a/src/airbyte_api/models/source_amazon_seller_partner.py +++ b/src/airbyte_api/models/source_amazon_seller_partner.py @@ -99,6 +99,14 @@ class ReportName(str, Enum): GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL = 'GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL' GET_XML_BROWSE_TREE_DATA = 'GET_XML_BROWSE_TREE_DATA' GET_VENDOR_REAL_TIME_INVENTORY_REPORT = 'GET_VENDOR_REAL_TIME_INVENTORY_REPORT' + GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT = 'GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT' + GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT = 'GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT' + GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT = 'GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT' + GET_SALES_AND_TRAFFIC_REPORT = 'GET_SALES_AND_TRAFFIC_REPORT' + GET_VENDOR_SALES_REPORT = 'GET_VENDOR_SALES_REPORT' + GET_VENDOR_INVENTORY_REPORT = 'GET_VENDOR_INVENTORY_REPORT' + GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT = 'GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT' + GET_VENDOR_TRAFFIC_REPORT = 'GET_VENDOR_TRAFFIC_REPORT' @dataclass_json(undefined=Undefined.EXCLUDE) diff --git a/src/airbyte_api/models/source_appcues.py b/src/airbyte_api/models/source_appcues.py new file mode 100644 index 00000000..3bef1723 --- /dev/null +++ b/src/airbyte_api/models/source_appcues.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
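# Sketch of the new 7shifts source, assuming the models package re-exports it;
# the token is a placeholder, not a real credential.
from datetime import datetime, timezone

from airbyte_api.models import Source7shifts

source = Source7shifts(
    access_token='<7SHIFTS_ACCESS_TOKEN>',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),  # encoded to ISO 8601 on serialization
)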
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Appcues(str, Enum): + APPCUES = 'appcues' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAppcues: + account_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('account_id') }}) + r"""Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account)""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Appcues] = dataclasses.field(default=Appcues.APPCUES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_appfigures.py b/src/airbyte_api/models/source_appfigures.py new file mode 100644 index 00000000..37a7fd99 --- /dev/null +++ b/src/airbyte_api/models/source_appfigures.py @@ -0,0 +1,36 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class GroupBy(str, Enum): + r"""Category term for grouping the search results""" + NETWORK = 'network' + PRODUCT = 'product' + COUNTRY = 'country' + DATE = 'date' + + +class Appfigures(str, Enum): + APPFIGURES = 'appfigures' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceAppfigures: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + group_by: Optional[GroupBy] = dataclasses.field(default=GroupBy.PRODUCT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('group_by'), 'exclude': lambda f: f is None }}) + r"""Category term for grouping the search results""" + search_store: Optional[str] = dataclasses.field(default='apple', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('search_store'), 'exclude': lambda f: f is None }}) + r"""The store which needs to be searched in streams""" + SOURCE_TYPE: Final[Appfigures] = dataclasses.field(default=Appfigures.APPFIGURES, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_asana.py b/src/airbyte_api/models/source_asana.py index 6b8b0459..e4d6d41e 100644 --- a/src/airbyte_api/models/source_asana.py +++ b/src/airbyte_api/models/source_asana.py @@ -53,8 +53,6 @@ class SourceAsana: organization_export_ids: Optional[List[Any]] = dataclasses.field(default=None, 
metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization_export_ids'), 'exclude': lambda f: f is None }}) r"""Globally unique identifiers for the organization exports""" SOURCE_TYPE: Final[Optional[SourceAsanaAsana]] = dataclasses.field(default=SourceAsanaAsana.ASANA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) - test_mode: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('test_mode'), 'exclude': lambda f: f is None }}) - r"""This flag is used for testing purposes for certain streams that return a lot of data. This flag is not meant to be enabled for prod.""" diff --git a/src/airbyte_api/models/source_azure_blob_storage.py b/src/airbyte_api/models/source_azure_blob_storage.py index 937cc8cc..9aec3e5b 100644 --- a/src/airbyte_api/models/source_azure_blob_storage.py +++ b/src/airbyte_api/models/source_azure_blob_storage.py @@ -156,12 +156,6 @@ class FromCSV: -class InferenceType(str, Enum): - r"""How to infer the types of the columns. If none, inference default to strings.""" - NONE = 'None' - PRIMITIVE_TYPES_ONLY = 'Primitive Types Only' - - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class CSVFormat: @@ -180,8 +174,6 @@ class CSVFormat: r"""How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.""" ignore_errors_on_fields_mismatch: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ignore_errors_on_fields_mismatch'), 'exclude': lambda f: f is None }}) r"""Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.""" - inference_type: Optional[InferenceType] = dataclasses.field(default=InferenceType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('inference_type'), 'exclude': lambda f: f is None }}) - r"""How to infer the types of the columns. If none, inference default to strings.""" null_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('null_values'), 'exclude': lambda f: f is None }}) r"""A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.""" quote_char: Optional[str] = dataclasses.field(default='"', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('quote_char'), 'exclude': lambda f: f is None }}) @@ -198,7 +190,7 @@ class CSVFormat: -class SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype(str, Enum): +class Filetype(str, Enum): AVRO = 'avro' @@ -207,7 +199,7 @@ class AvroFormat: double_as_string: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('double_as_string'), 'exclude': lambda f: f is None }}) r"""Whether to convert double fields to strings. 
This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers.""" - FILETYPE: Final[Optional[SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceAzureBlobStorageSchemasStreamsFormatFormatFiletype.AVRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + FILETYPE: Final[Optional[Filetype]] = dataclasses.field(default=Filetype.AVRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) @@ -232,10 +224,6 @@ class FileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" - legacy_prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('legacy_prefix'), 'exclude': lambda f: f is None }}) - r"""The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.""" - primary_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary_key'), 'exclude': lambda f: f is None }}) - r"""The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) r"""When enabled, syncs will not validate or structure records against the stream's schema.""" validation_policy: Optional[ValidationPolicy] = dataclasses.field(default=ValidationPolicy.EMIT_RECORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('validation_policy'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_bitly.py b/src/airbyte_api/models/source_bitly.py new file mode 100644 index 00000000..49df9b47 --- /dev/null +++ b/src/airbyte_api/models/source_bitly.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
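# Sketch: SourceBitly needs an API key plus a closed [start_date, end_date]
# window; both datetimes are encoded to ISO 8601 on serialization. The key is
# a placeholder and the import path is assumed.
from datetime import datetime, timezone

from airbyte_api.models import SourceBitly

source_bitly = SourceBitly(
    api_key='<BITLY_API_KEY>',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    end_date=datetime(2024, 6, 30, tzinfo=timezone.utc),
)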
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Bitly(str, Enum): + BITLY = 'bitly' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBitly: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + end_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Bitly] = dataclasses.field(default=Bitly.BITLY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_brevo.py b/src/airbyte_api/models/source_brevo.py new file mode 100644 index 00000000..35757ab8 --- /dev/null +++ b/src/airbyte_api/models/source_brevo.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Brevo(str, Enum): + BREVO = 'brevo' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBrevo: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Brevo] = dataclasses.field(default=Brevo.BREVO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_buildkite.py b/src/airbyte_api/models/source_buildkite.py new file mode 100644 index 00000000..b927e41a --- /dev/null +++ b/src/airbyte_api/models/source_buildkite.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Buildkite(str, Enum): + BUILDKITE = 'buildkite' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBuildkite: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Buildkite] = dataclasses.field(default=Buildkite.BUILDKITE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_buzzsprout.py b/src/airbyte_api/models/source_buzzsprout.py new file mode 100644 index 00000000..efe90ea1 --- /dev/null +++ b/src/airbyte_api/models/source_buzzsprout.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Buzzsprout(str, Enum): + BUZZSPROUT = 'buzzsprout' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceBuzzsprout: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + podcast_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('podcast_id') }}) + r"""Podcast ID found in `https://www.buzzsprout.com/my/profile/api`""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Buzzsprout] = dataclasses.field(default=Buzzsprout.BUZZSPROUT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_canny.py b/src/airbyte_api/models/source_canny.py new file mode 100644 index 00000000..0c8bb70f --- /dev/null +++ b/src/airbyte_api/models/source_canny.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
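# Sketch: SourceBuzzsprout adds a podcast_id on top of the usual key-and-date
# pair; values are placeholders and the import path is assumed.
from datetime import datetime, timezone

from airbyte_api.models import SourceBuzzsprout

source_buzzsprout = SourceBuzzsprout(
    api_key='<BUZZSPROUT_API_KEY>',
    podcast_id='123456',  # shown on the profile API page mentioned in the docstring
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
)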
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Canny(str, Enum): + CANNY = 'canny' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceCanny: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""You can find your secret API key in Your Canny Subdomain > Settings > API""" + SOURCE_TYPE: Final[Canny] = dataclasses.field(default=Canny.CANNY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_chameleon.py b/src/airbyte_api/models/source_chameleon.py new file mode 100644 index 00000000..9694d294 --- /dev/null +++ b/src/airbyte_api/models/source_chameleon.py @@ -0,0 +1,36 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Filter(str, Enum): + r"""Filter to use in the `segments_experiences` stream""" + TOUR = 'tour' + SURVEY = 'survey' + LAUNCHER = 'launcher' + + +class Chameleon(str, Enum): + CHAMELEON = 'chameleon' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceChameleon: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + end_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + filter_: Optional[Filter] = dataclasses.field(default=Filter.TOUR, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter'), 'exclude': lambda f: f is None }}) + r"""Filter to use in the `segments_experiences` stream""" + limit: Optional[str] = dataclasses.field(default='50', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('limit'), 'exclude': lambda f: f is None }}) + r"""Max records per page limit""" + SOURCE_TYPE: Final[Chameleon] = dataclasses.field(default=Chameleon.CHAMELEON, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_cimis.py b/src/airbyte_api/models/source_cimis.py new file mode 100644 index 00000000..ed3c0760 --- /dev/null +++ b/src/airbyte_api/models/source_cimis.py @@ -0,0 +1,42 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
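# Sketch of the Chameleon source defined above: filter_ takes one of the three
# enum members (defaulting to TOUR) and limit is a string, not an int. Values
# are illustrative and the import path is assumed.
from datetime import datetime, timezone

from airbyte_api.models import Filter, SourceChameleon

source_chameleon = SourceChameleon(
    api_key='<CHAMELEON_API_KEY>',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    end_date=datetime(2024, 6, 30, tzinfo=timezone.utc),
    filter_=Filter.SURVEY,  # tour, survey, or launcher
    limit='100',            # max records per page, passed as a string
)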
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Any, Final, List, Optional + + +class Cimis(str, Enum): + CIMIS = 'cimis' + + +class TargetsType(str, Enum): + WSN_STATION_NUMBERS = 'WSN station numbers' + CALIFORNIA_ZIP_CODES = 'California zip codes' + DECIMAL_DEGREE_COORDINATES = 'decimal-degree coordinates' + STREET_ADDRESSES = 'street addresses' + + +class UnitOfMeasure(str, Enum): + E = 'E' + M = 'M' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceCimis: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + end_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + targets: List[Any] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('targets') }}) + targets_type: TargetsType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('targets_type') }}) + daily_data_items: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('daily_data_items'), 'exclude': lambda f: f is None }}) + hourly_data_items: Optional[List[Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('hourly_data_items'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Cimis] = dataclasses.field(default=Cimis.CIMIS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + unit_of_measure: Optional[UnitOfMeasure] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('unit_of_measure'), 'exclude': lambda f: f is None }}) + + diff --git a/src/airbyte_api/models/source_clickhouse.py b/src/airbyte_api/models/source_clickhouse.py index 73d803a2..54fde191 100644 --- a/src/airbyte_api/models/source_clickhouse.py +++ b/src/airbyte_api/models/source_clickhouse.py @@ -86,6 +86,8 @@ class SourceClickhouse: port: Optional[int] = dataclasses.field(default=8123, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) r"""The port of the database.""" SOURCE_TYPE: Final[SourceClickhouseClickhouse] = dataclasses.field(default=SourceClickhouseClickhouse.CLICKHOUSE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + ssl: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }}) + r"""Encrypt data using SSL.""" tunnel_method: Optional[SourceClickhouseSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) r"""Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.""" diff --git a/src/airbyte_api/models/source_e2e_test_cloud.py 
b/src/airbyte_api/models/source_e2e_test_cloud.py deleted file mode 100644 index 48564bf9..00000000 --- a/src/airbyte_api/models/source_e2e_test_cloud.py +++ /dev/null @@ -1,71 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Any, Dict, Final, Optional, Union - - -class SourceE2eTestCloudType(str, Enum): - MULTI_STREAM = 'MULTI_STREAM' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class MultiSchema: - r"""A catalog with multiple data streams, each with a different schema.""" - stream_schemas: Optional[str] = dataclasses.field(default='{ "stream1": { "type": "object", "properties": { "field1": { "type": "string" } } }, "stream2": { "type": "object", "properties": { "field1": { "type": "boolean" } } } }', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_schemas'), 'exclude': lambda f: f is None }}) - r"""A Json object specifying multiple data streams and their schemas. Each key in this object is one stream name. Each value is the schema for that stream. The schema should be compatible with draft-07. See this doc for examples.""" - TYPE: Final[Optional[SourceE2eTestCloudType]] = dataclasses.field(default=SourceE2eTestCloudType.MULTI_STREAM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type'), 'exclude': lambda f: f is None }}) - - - - -class SourceE2eTestCloudSchemasType(str, Enum): - SINGLE_STREAM = 'SINGLE_STREAM' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SingleSchema: - r"""A catalog with one or multiple streams that share the same schema.""" - stream_duplication: Optional[int] = dataclasses.field(default=1, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_duplication'), 'exclude': lambda f: f is None }}) - r"""Duplicate the stream for easy load testing. Each stream name will have a number suffix. For example, if the stream name is \\"ds\\", the duplicated streams will be \\"ds_0\\", \\"ds_1\\", etc.""" - stream_name: Optional[str] = dataclasses.field(default='data_stream', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_name'), 'exclude': lambda f: f is None }}) - r"""Name of the data stream.""" - stream_schema: Optional[str] = dataclasses.field(default='{ "type": "object", "properties": { "column1": { "type": "string" } } }', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stream_schema'), 'exclude': lambda f: f is None }}) - r"""A Json schema for the stream. The schema should be compatible with draft-07. 
See this doc for examples.""" - TYPE: Final[Optional[SourceE2eTestCloudSchemasType]] = dataclasses.field(default=SourceE2eTestCloudSchemasType.SINGLE_STREAM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type'), 'exclude': lambda f: f is None }}) - - - - -class E2eTestCloud(str, Enum): - E2E_TEST_CLOUD = 'e2e-test-cloud' - - -class Type(str, Enum): - CONTINUOUS_FEED = 'CONTINUOUS_FEED' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ContinuousFeed: - mock_catalog: MockCatalog = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('mock_catalog') }}) - additional_properties: Optional[Dict[str, Any]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'exclude': lambda f: f is None }}) - max_messages: Optional[int] = dataclasses.field(default=100, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_messages'), 'exclude': lambda f: f is None }}) - r"""Number of records to emit per stream. Min 1. Max 100 billion.""" - message_interval_ms: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message_interval_ms'), 'exclude': lambda f: f is None }}) - r"""Interval between messages in ms. Min 0 ms. Max 60000 ms (1 minute).""" - seed: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('seed'), 'exclude': lambda f: f is None }}) - r"""When the seed is unspecified, the current time millis will be used as the seed. Range: [0, 1000000].""" - SOURCE_TYPE: Final[Optional[E2eTestCloud]] = dataclasses.field(default=E2eTestCloud.E2E_TEST_CLOUD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType'), 'exclude': lambda f: f is None }}) - TYPE: Final[Optional[Type]] = dataclasses.field(default=Type.CONTINUOUS_FEED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type'), 'exclude': lambda f: f is None }}) - - - -MockCatalog = Union[SingleSchema, MultiSchema] - -SourceE2eTestCloud = Union[ContinuousFeed] diff --git a/src/airbyte_api/models/source_ezofficeinventory.py b/src/airbyte_api/models/source_ezofficeinventory.py new file mode 100644 index 00000000..7d3ed4ca --- /dev/null +++ b/src/airbyte_api/models/source_ezofficeinventory.py @@ -0,0 +1,28 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Ezofficeinventory(str, Enum): + EZOFFICEINVENTORY = 'ezofficeinventory' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceEzofficeinventory: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + r"""Your EZOfficeInventory Access Token. API Access is disabled by default. 
Enable API Access in Settings > Integrations > API Integration and click on Update to generate a new access token""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + r"""Earliest date you want to sync historical streams (inventory_histories, asset_histories, asset_stock_histories) from""" + subdomain: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('subdomain') }}) + r"""The company name used in signup, also visible in the URL when logged in.""" + SOURCE_TYPE: Final[Ezofficeinventory] = dataclasses.field(default=Ezofficeinventory.EZOFFICEINVENTORY, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_facebook_marketing.py b/src/airbyte_api/models/source_facebook_marketing.py index cc87cdf4..307f99f1 100644 --- a/src/airbyte_api/models/source_facebook_marketing.py +++ b/src/airbyte_api/models/source_facebook_marketing.py @@ -334,18 +334,12 @@ class SourceFacebookMarketing: r"""Credentials for connecting to the Facebook Marketing API""" access_token: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token'), 'exclude': lambda f: f is None }}) r"""The value of the generated access token. From your App’s Dashboard, click on \\"Marketing API\\" then \\"Tools\\". Select permissions ads_management, ads_read, read_insights, business_management. Then click on \\"Get token\\". See the docs for more information.""" - action_breakdowns_allow_empty: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('action_breakdowns_allow_empty'), 'exclude': lambda f: f is None }}) - r"""Allows action_breakdowns to be an empty list""" ad_statuses: Optional[List[ValidAdStatuses]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ad_statuses'), 'exclude': lambda f: f is None }}) r"""Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.""" adset_statuses: Optional[List[ValidAdSetStatuses]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('adset_statuses'), 'exclude': lambda f: f is None }}) r"""Select the statuses you want to be loaded in the stream. If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.""" campaign_statuses: Optional[List[ValidCampaignStatuses]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('campaign_statuses'), 'exclude': lambda f: f is None }}) r"""Select the statuses you want to be loaded in the stream. 
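# Sketch of the EZOfficeInventory source defined above; the subdomain is the
# company name chosen at signup, and all values here are placeholders.
from datetime import datetime, timezone

from airbyte_api.models import SourceEzofficeinventory

source_ezo = SourceEzofficeinventory(
    api_key='<EZO_ACCESS_TOKEN>',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),  # earliest history to sync
    subdomain='mycompany',
)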
If no specific statuses are selected, the API's default behavior applies, and some statuses may be filtered out.""" - client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) - r"""The Client Id for your OAuth app""" - client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) - r"""The Client Secret for your OAuth app""" custom_insights: Optional[List[InsightConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_insights'), 'exclude': lambda f: f is None }}) r"""A list which contains ad statistics entries, each entry must have a name and can contain fields, breakdowns or action_breakdowns. Click on \\"add\\" to fill this field.""" end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_file.py b/src/airbyte_api/models/source_file.py index 273b55be..f0374bbd 100644 --- a/src/airbyte_api/models/source_file.py +++ b/src/airbyte_api/models/source_file.py @@ -21,6 +21,20 @@ class FileFormat(str, Enum): YAML = 'yaml' +class SourceFileSchemasProviderStorageProvider8Storage(str, Enum): + r"""WARNING: Note that the local storage URL available for reading must start with the local mount \\"/local/\\" at the moment until we implement more advanced docker mounting options.""" + LOCAL = 'local' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class LocalFilesystemLimited: + STORAGE: Final[SourceFileSchemasProviderStorageProvider8Storage] = dataclasses.field(default=SourceFileSchemasProviderStorageProvider8Storage.LOCAL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('storage') }}) + r"""WARNING: Note that the local storage URL available for reading must start with the local mount \\"/local/\\" at the moment until we implement more advanced docker mounting options.""" + + + + class SourceFileSchemasProviderStorageProvider7Storage(str, Enum): SFTP = 'SFTP' @@ -93,7 +107,7 @@ class SourceFileSchemasStorage(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceFileS3AmazonWebServices: +class S3AmazonWebServices: aws_access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_access_key_id'), 'exclude': lambda f: f is None }}) r"""In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. 
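# Sketch: the renamed S3 provider block. Both credential fields shown above are
# optional and default to None, so they may be omitted for publicly readable
# buckets; any fields outside this hunk are assumed unchanged. Placeholders below.
from airbyte_api.models import S3AmazonWebServices

provider = S3AmazonWebServices(
    aws_access_key_id='<AWS_ACCESS_KEY_ID>',
    aws_secret_access_key='<AWS_SECRET_ACCESS_KEY>',
)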
If accessing publicly available data, this field is not necessary.""" aws_secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_access_key'), 'exclude': lambda f: f is None }}) @@ -152,4 +166,4 @@ class SourceFile: -StorageProvider = Union[HTTPSPublicWeb, GCSGoogleCloudStorage, SourceFileS3AmazonWebServices, AzBlobAzureBlobStorage, SSHSecureShell, SCPSecureCopyProtocol, SFTPSecureFileTransferProtocol] +StorageProvider = Union[HTTPSPublicWeb, GCSGoogleCloudStorage, S3AmazonWebServices, AzBlobAzureBlobStorage, SSHSecureShell, SCPSecureCopyProtocol, SFTPSecureFileTransferProtocol, LocalFilesystemLimited] diff --git a/src/airbyte_api/models/source_front.py b/src/airbyte_api/models/source_front.py new file mode 100644 index 00000000..319324ba --- /dev/null +++ b/src/airbyte_api/models/source_front.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Front(str, Enum): + FRONT = 'front' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceFront: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + page_limit: Optional[str] = dataclasses.field(default='50', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_limit'), 'exclude': lambda f: f is None }}) + r"""Page limit for the responses""" + SOURCE_TYPE: Final[Front] = dataclasses.field(default=Front.FRONT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_gcs.py b/src/airbyte_api/models/source_gcs.py index c685c5ce..5bc584b7 100644 --- a/src/airbyte_api/models/source_gcs.py +++ b/src/airbyte_api/models/source_gcs.py @@ -164,12 +164,6 @@ class SourceGcsFromCSV: -class SourceGcsInferenceType(str, Enum): - r"""How to infer the types of the columns. If none, inference default to strings.""" - NONE = 'None' - PRIMITIVE_TYPES_ONLY = 'Primitive Types Only' - - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceGcsCSVFormat: @@ -188,8 +182,6 @@ class SourceGcsCSVFormat: r"""How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. 
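# Sketch of the new Front source; note that page_limit, like Chameleon's limit,
# is typed as a string with a string default ('50'). Values are placeholders.
from datetime import datetime, timezone

from airbyte_api.models import SourceFront

source_front = SourceFront(
    api_key='<FRONT_API_KEY>',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    page_limit='100',
)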
If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.""" ignore_errors_on_fields_mismatch: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ignore_errors_on_fields_mismatch'), 'exclude': lambda f: f is None }}) r"""Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.""" - inference_type: Optional[SourceGcsInferenceType] = dataclasses.field(default=SourceGcsInferenceType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('inference_type'), 'exclude': lambda f: f is None }}) - r"""How to infer the types of the columns. If none, inference default to strings.""" null_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('null_values'), 'exclude': lambda f: f is None }}) r"""A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.""" quote_char: Optional[str] = dataclasses.field(default='"', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('quote_char'), 'exclude': lambda f: f is None }}) @@ -240,10 +232,6 @@ class SourceGcsFileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" - legacy_prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('legacy_prefix'), 'exclude': lambda f: f is None }}) - r"""The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.""" - primary_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary_key'), 'exclude': lambda f: f is None }}) - r"""The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.""" recent_n_files_to_read_for_schema_discovery: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('recent_n_files_to_read_for_schema_discovery'), 'exclude': lambda f: f is None }}) r"""The number of recent files which will be used to discover the schema for this stream.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_github.py b/src/airbyte_api/models/source_github.py index 476fcda6..5e15d04f 100644 --- a/src/airbyte_api/models/source_github.py +++ b/src/airbyte_api/models/source_github.py @@ -55,14 +55,10 @@ class SourceGithub: r"""List of GitHub organizations/repositories, e.g. 
`airbytehq/airbyte` for single repository, `airbytehq/*` to get all repositories from an organization and `airbytehq/a*` for matching multiple repositories by pattern.""" api_url: Optional[str] = dataclasses.field(default='https://api.github.com/', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_url'), 'exclude': lambda f: f is None }}) r"""Please enter your basic URL from self-hosted GitHub instance or leave it empty to use GitHub.""" - branch: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('branch'), 'exclude': lambda f: f is None }}) - r"""(DEPRCATED) Space-delimited list of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.""" branches: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('branches'), 'exclude': lambda f: f is None }}) r"""List of GitHub repository branches to pull commits for, e.g. `airbytehq/airbyte/master`. If no branches are specified for a repository, the default branch will be pulled.""" max_waiting_time: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_waiting_time'), 'exclude': lambda f: f is None }}) r"""Max Waiting Time for rate limit. Set a higher value to wait until rate limits are reset to continue the sync""" - repository: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('repository'), 'exclude': lambda f: f is None }}) - r"""(DEPRCATED) Space-delimited list of GitHub organizations/repositories, e.g. `airbytehq/airbyte` for single repository, `airbytehq/*` for get all repositories from organization and `airbytehq/airbyte airbytehq/another-repo` for multiple repositories.""" SOURCE_TYPE: Final[SourceGithubGithub] = dataclasses.field(default=SourceGithubGithub.GITHUB, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date from which you'd like to replicate data from GitHub in the format YYYY-MM-DDT00:00:00Z. If the date is not set, all data will be replicated. For the streams which support this configuration, only data generated on or after the start date will be replicated. 
This field doesn't apply to all streams, see the docs for more info""" diff --git a/src/airbyte_api/models/source_gitlab.py b/src/airbyte_api/models/source_gitlab.py index 1cca4d39..fc85ee2b 100644 --- a/src/airbyte_api/models/source_gitlab.py +++ b/src/airbyte_api/models/source_gitlab.py @@ -56,12 +56,8 @@ class SourceGitlab: credentials: SourceGitlabAuthorizationMethod = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials') }}) api_url: Optional[str] = dataclasses.field(default='gitlab.com', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_url'), 'exclude': lambda f: f is None }}) r"""Please enter your basic URL from GitLab instance.""" - groups: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('groups'), 'exclude': lambda f: f is None }}) - r"""[DEPRECATED] Space-delimited list of groups. e.g. airbyte.io.""" groups_list: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('groups_list'), 'exclude': lambda f: f is None }}) r"""List of groups. e.g. airbyte.io.""" - projects: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('projects'), 'exclude': lambda f: f is None }}) - r"""[DEPRECATED] Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.""" projects_list: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('projects_list'), 'exclude': lambda f: f is None }}) r"""Space-delimited list of projects. e.g. airbyte.io/documentation meltano/tap-gitlab.""" SOURCE_TYPE: Final[SourceGitlabGitlab] = dataclasses.field(default=SourceGitlabGitlab.GITLAB, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_google_analytics_data_api.py b/src/airbyte_api/models/source_google_analytics_data_api.py index 60464b77..1ddf43f8 100644 --- a/src/airbyte_api/models/source_google_analytics_data_api.py +++ b/src/airbyte_api/models/source_google_analytics_data_api.py @@ -288,10 +288,10 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType(str, Enum) @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class Filter: +class SourceGoogleAnalyticsDataAPIFilter: r"""A primitive filter. 
In the same FilterExpression, all of the filter's field names need to be all dimensions.""" field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('field_name') }}) - filter_: SourceGoogleAnalyticsDataAPISchemasFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) + filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) FILTER_TYPE: Final[Optional[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType]] = dataclasses.field(default=SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilterType.FILTER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter_type'), 'exclude': lambda f: f is None }}) @@ -455,7 +455,7 @@ class SourceGoogleAnalyticsDataAPISchemasStringFilter: @dataclasses.dataclass class SourceGoogleAnalyticsDataAPISchemasExpression: field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('field_name') }}) - filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) + filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) @@ -632,7 +632,7 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterString @dataclasses.dataclass class SourceGoogleAnalyticsDataAPIExpression: field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('field_name') }}) - filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) + filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) @@ -809,7 +809,7 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter: @dataclasses.dataclass class Expression: field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('field_name') }}) - filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) + filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) @@ -988,10 +988,10 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFi @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class SourceGoogleAnalyticsDataAPIFilter: +class SourceGoogleAnalyticsDataAPISchemasFilter: r"""A primitive filter. 
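# Sketch of the renamed primitive dimension filter; `string_filter` stands in
# for one of the four union members (StringFilter, InListFilter, NumericFilter,
# BetweenFilter) built elsewhere, and the import path is assumed.
from airbyte_api.models import DimensionsFilter, SourceGoogleAnalyticsDataAPIFilter

dimensions_filter: DimensionsFilter = SourceGoogleAnalyticsDataAPIFilter(
    field_name='city',      # a dimension name, per the docstring above
    filter_=string_filter,
)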
In the same FilterExpression, all of the filter's field names need to be all metrics.""" field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('field_name') }}) - filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) + filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) FILTER_TYPE: Final[Optional[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType]] = dataclasses.field(default=SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter4FilterType.FILTER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter_type'), 'exclude': lambda f: f is None }}) @@ -1509,7 +1509,7 @@ class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFil @dataclasses.dataclass class SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayExpression: field_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('field_name') }}) - filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) + filter_: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filter') }}) @@ -1584,7 +1584,7 @@ class SourceGoogleAnalyticsDataAPI: Value = Union[Int64Value, DoubleValue] -SourceGoogleAnalyticsDataAPISchemasFilter = Union[StringFilter, InListFilter, NumericFilter, BetweenFilter] +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter = Union[StringFilter, InListFilter, NumericFilter, BetweenFilter] SourceGoogleAnalyticsDataAPISchemasFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3ExpressionDoubleValue] @@ -1592,7 +1592,7 @@ class SourceGoogleAnalyticsDataAPI: SourceGoogleAnalyticsDataAPISchemasValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3DoubleValue] -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasStringFilter, SourceGoogleAnalyticsDataAPISchemasInListFilter, SourceGoogleAnalyticsDataAPISchemasNumericFilter, SourceGoogleAnalyticsDataAPISchemasBetweenFilter] +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter3Filter = Union[SourceGoogleAnalyticsDataAPISchemasStringFilter, SourceGoogleAnalyticsDataAPISchemasInListFilter, SourceGoogleAnalyticsDataAPISchemasNumericFilter, SourceGoogleAnalyticsDataAPISchemasBetweenFilter] SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDoubleValue] @@ -1600,7 +1600,7 @@ class SourceGoogleAnalyticsDataAPI: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterValue = 
Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter2DoubleValue] -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter] +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterBetweenFilter] SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1ExpressionsDoubleValue] @@ -1608,9 +1608,9 @@ class SourceGoogleAnalyticsDataAPI: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Int64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1DoubleValue] -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterDimensionsFilter1Filter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter] +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDimensionFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayBetweenFilter] -DimensionsFilter = Union[AndGroup, OrGroup, NotExpression, Filter] +DimensionsFilter = Union[AndGroup, OrGroup, NotExpression, SourceGoogleAnalyticsDataAPIFilter] SourceGoogleAnalyticsDataAPIFromValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterDoubleValue] @@ -1618,7 +1618,7 @@ class SourceGoogleAnalyticsDataAPI: SourceGoogleAnalyticsDataAPIValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayDoubleValue] -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayFilter = Union[SourceGoogleAnalyticsDataAPIStringFilter, SourceGoogleAnalyticsDataAPIInListFilter, SourceGoogleAnalyticsDataAPINumericFilter, SourceGoogleAnalyticsDataAPIBetweenFilter] +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter = Union[SourceGoogleAnalyticsDataAPIStringFilter, SourceGoogleAnalyticsDataAPIInListFilter, SourceGoogleAnalyticsDataAPINumericFilter, SourceGoogleAnalyticsDataAPIBetweenFilter] SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3FromValue = 
Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter3ExpressionDoubleValue] @@ -1642,6 +1642,6 @@ class SourceGoogleAnalyticsDataAPI: SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterValue = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsInt64Value, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1ExpressionsDoubleValue] -SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterFilter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter] +SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterMetricsFilter1Filter = Union[SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterStringFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterInListFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterNumericFilter, SourceGoogleAnalyticsDataAPISchemasCustomReportsArrayMetricFilterBetweenFilter] -MetricsFilter = Union[SourceGoogleAnalyticsDataAPIAndGroup, SourceGoogleAnalyticsDataAPIOrGroup, SourceGoogleAnalyticsDataAPINotExpression, SourceGoogleAnalyticsDataAPIFilter] +MetricsFilter = Union[SourceGoogleAnalyticsDataAPIAndGroup, SourceGoogleAnalyticsDataAPIOrGroup, SourceGoogleAnalyticsDataAPINotExpression, SourceGoogleAnalyticsDataAPISchemasFilter]
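The long rename chain above only re-labels generated union aliases in source_google_analytics_data_api.py; each union keeps the same member set. A small sanity-check sketch of what the renames mean for callers, assuming (as the SDK README examples do) that the aliases are re-exported from airbyte_api.models:

```python
from typing import get_args

from airbyte_api import models

# After the renames, the primitive-filter member of each top-level union is
# the relabelled class; the And/Or/Not group members are untouched.
assert models.SourceGoogleAnalyticsDataAPIFilter in get_args(models.DimensionsFilter)
assert models.SourceGoogleAnalyticsDataAPISchemasFilter in get_args(models.MetricsFilter)
```

diff --git a/src/airbyte_api/models/source_google_drive.py b/src/airbyte_api/models/source_google_drive.py index 03299f4b..0fe8f92b 100644 --- a/src/airbyte_api/models/source_google_drive.py +++ b/src/airbyte_api/models/source_google_drive.py @@ -222,8 +222,6 @@ class SourceGoogleDriveFileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" - primary_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary_key'), 'exclude': lambda f: f is None }}) - r"""The column or columns (for a composite key) that serves as the unique identifier of a record. 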
If empty, the primary key will default to the parser's default primary key.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) r"""When enabled, syncs will not validate or structure records against the stream's schema.""" validation_policy: Optional[SourceGoogleDriveValidationPolicy] = dataclasses.field(default=SourceGoogleDriveValidationPolicy.EMIT_RECORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('validation_policy'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_google_search_console.py b/src/airbyte_api/models/source_google_search_console.py index 1aae40a3..cc7400a8 100644 --- a/src/airbyte_api/models/source_google_search_console.py +++ b/src/airbyte_api/models/source_google_search_console.py @@ -82,8 +82,6 @@ class SourceGoogleSearchConsole: authorization: AuthenticationType = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('authorization') }}) site_urls: List[str] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_urls') }}) r"""The URLs of the website property attached to your GSC account. Learn more about properties here.""" - custom_reports: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports'), 'exclude': lambda f: f is None }}) - r"""(DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports.""" custom_reports_array: Optional[List[SourceGoogleSearchConsoleCustomReportConfig]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('custom_reports_array'), 'exclude': lambda f: f is None }}) r"""You can add your Custom Analytics report by creating one.""" data_state: Optional[DataFreshness] = dataclasses.field(default=DataFreshness.FINAL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_state'), 'exclude': lambda f: f is None }})
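The Google Search Console hunk above drops the deprecated custom_reports JSON-string field, leaving typed report configs as the only path. A hedged migration sketch (the field names on SourceGoogleSearchConsoleCustomReportConfig are assumptions based on the connector docs, and `auth` stands in for any AuthenticationType variant):

```python
from airbyte_api import models

def gsc_source(auth, site_urls):
    # custom_reports (a raw JSON string) no longer exists on this model;
    # pass typed config objects via custom_reports_array instead.
    return models.SourceGoogleSearchConsole(
        authorization=auth,
        site_urls=site_urls,
        custom_reports_array=[
            models.SourceGoogleSearchConsoleCustomReportConfig(
                name='pages_by_country',         # hypothetical report name
                dimensions=['page', 'country'],  # assumed field, per connector docs
            )
        ],
    )
```

diff --git a/src/airbyte_api/models/source_google_tasks.py b/src/airbyte_api/models/source_google_tasks.py new file mode 100644 index 00000000..81ac36ff --- /dev/null +++ b/src/airbyte_api/models/source_google_tasks.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 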
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class GoogleTasks(str, Enum): + GOOGLE_TASKS = 'google-tasks' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceGoogleTasks: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + records_limit: Optional[str] = dataclasses.field(default='50', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('records_limit'), 'exclude': lambda f: f is None }}) + r"""The maximum number of records to be returned per request""" + SOURCE_TYPE: Final[GoogleTasks] = dataclasses.field(default=GoogleTasks.GOOGLE_TASKS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_guru.py b/src/airbyte_api/models/source_guru.py new file mode 100644 index 00000000..ecd5ea35 --- /dev/null +++ b/src/airbyte_api/models/source_guru.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Guru(str, Enum): + GURU = 'guru' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceGuru: + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + search_cards_query: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('search_cards_query'), 'exclude': lambda f: f is None }}) + r"""Query for searching cards""" + SOURCE_TYPE: Final[Guru] = dataclasses.field(default=Guru.GURU, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + team_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('team_id'), 'exclude': lambda f: f is None }}) + r"""Team ID received in the response of the /teams stream; make sure you have access to this team""" + +
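The two new connector models just added (SourceGoogleTasks and SourceGuru) follow the SDK's usual generated-dataclass pattern. A minimal construction sketch, assuming airbyte_api.models re-exports them as the SDK README examples do:

```python
from datetime import datetime, timezone

from airbyte_api import models

tasks = models.SourceGoogleTasks(
    api_key='...',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    records_limit='100',  # note: the spec types this as a string, not an int
)
guru = models.SourceGuru(
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    username='user@example.com',
    password='...',  # optional; SOURCE_TYPE is pre-set by its default
)
```

diff --git a/src/airbyte_api/models/source_harvest.py b/src/airbyte_api/models/source_harvest.py index a6403fea..8fdbee34 100644 --- a/src/airbyte_api/models/source_harvest.py +++ b/src/airbyte_api/models/source_harvest.py @@ -57,8 +57,6 @@ class SourceHarvest: r"""UTC date and time in the format 2017-01-25T00:00:00Z. 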
Any data before this date will not be replicated.""" credentials: Optional[SourceHarvestAuthenticationMechanism] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) r"""Choose how to authenticate to Harvest.""" - replication_end_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('replication_end_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) - r"""UTC date and time in the format 2017-01-25T00:00:00Z. Any data after this date will not be replicated.""" SOURCE_TYPE: Final[Harvest] = dataclasses.field(default=Harvest.HARVEST, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_height.py b/src/airbyte_api/models/source_height.py new file mode 100644 index 00000000..d5330a81 --- /dev/null +++ b/src/airbyte_api/models/source_height.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Height(str, Enum): + HEIGHT = 'height' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceHeight: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + search_query: Optional[str] = dataclasses.field(default='task', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('search_query'), 'exclude': lambda f: f is None }}) + r"""Search query to be used with the search stream""" + SOURCE_TYPE: Final[Height] = dataclasses.field(default=Height.HEIGHT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
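SourceHeight above is another small key-plus-start-date connector; a construction sketch under the same re-export assumption:

```python
from datetime import datetime, timezone

from airbyte_api import models

height = models.SourceHeight(
    api_key='...',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    search_query='task',  # matches the generated default; feeds the search stream
)
```

diff --git a/src/airbyte_api/models/source_instagram.py b/src/airbyte_api/models/source_instagram.py index 726c5157..747a3252 100644 --- a/src/airbyte_api/models/source_instagram.py +++ b/src/airbyte_api/models/source_instagram.py @@ -19,10 +19,6 @@ class SourceInstagramInstagram(str, Enum): class SourceInstagram: access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) r"""The value of the access token generated with instagram_basic, instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram Public Content Access permissions. 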
See the docs for more information""" - client_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id'), 'exclude': lambda f: f is None }}) - r"""The Client ID for your Oauth application""" - client_secret: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret'), 'exclude': lambda f: f is None }}) - r"""The Client Secret for your Oauth application""" SOURCE_TYPE: Final[SourceInstagramInstagram] = dataclasses.field(default=SourceInstagramInstagram.INSTAGRAM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date from which you'd like to replicate data for User Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. If left blank, the start date will be set to 2 years before the present date.""" diff --git a/src/airbyte_api/models/source_jira.py b/src/airbyte_api/models/source_jira.py index 017eb34a..ffbc5411 100644 --- a/src/airbyte_api/models/source_jira.py +++ b/src/airbyte_api/models/source_jira.py @@ -10,12 +10,6 @@ from typing import Final, List, Optional -class IssuesStreamExpandWith(str, Enum): - RENDERED_FIELDS = 'renderedFields' - TRANSITIONS = 'transitions' - CHANGELOG = 'changelog' - - class Jira(str, Enum): JIRA = 'jira' @@ -31,18 +25,10 @@ class SourceJira: r"""The user email for your Jira account which you used to generate the API token. This field is used for Authorization to your account by BasicAuth.""" enable_experimental_streams: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enable_experimental_streams'), 'exclude': lambda f: f is None }}) r"""Allow the use of experimental streams which rely on undocumented Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables for more info.""" - expand_issue_changelog: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expand_issue_changelog'), 'exclude': lambda f: f is None }}) - r"""(DEPRECATED) Expand the changelog when replicating issues.""" - expand_issue_transition: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('expand_issue_transition'), 'exclude': lambda f: f is None }}) - r"""(DEPRECATED) Expand the transitions when replicating issues.""" - issues_stream_expand_with: Optional[List[IssuesStreamExpandWith]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('issues_stream_expand_with'), 'exclude': lambda f: f is None }}) - r"""Select fields to Expand the `Issues` stream when replicating with:""" lookback_window_minutes: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window_minutes'), 'exclude': lambda f: f is None }}) r"""When set to N, the connector will always refresh resources created within the past N minutes. 
By default, updated objects that are not newly created are not incrementally synced.""" projects: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('projects'), 'exclude': lambda f: f is None }}) r"""List of Jira project keys to replicate data for, or leave it empty if you want to replicate data for all projects.""" - render_fields: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('render_fields'), 'exclude': lambda f: f is None }}) - r"""(DEPRECATED) Render issue fields in HTML format in addition to Jira JSON-like format.""" SOURCE_TYPE: Final[Jira] = dataclasses.field(default=Jira.JIRA, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date from which you want to replicate data from Jira, use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies to certain streams, and only data generated on or after the start date will be replicated. Or leave it empty if you want to replicate all data. For more information, refer to the documentation.""" diff --git a/src/airbyte_api/models/source_jotform.py b/src/airbyte_api/models/source_jotform.py new file mode 100644 index 00000000..8580b3ce --- /dev/null +++ b/src/airbyte_api/models/source_jotform.py @@ -0,0 +1,63 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional, Union + + +class SourceJotformSchemasAPIEndpoint(str, Enum): + ENTERPRISE = 'enterprise' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Enterprise: + enterprise_url: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enterprise_url') }}) + r"""Upgrade to Enterprise to make your API url your-domain.com/API or subdomain.jotform.com/API instead of api.jotform.com""" + API_ENDPOINT: Final[Optional[SourceJotformSchemasAPIEndpoint]] = dataclasses.field(default=SourceJotformSchemasAPIEndpoint.ENTERPRISE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_endpoint'), 'exclude': lambda f: f is None }}) + + + + +class SourceJotformAPIEndpoint(str, Enum): + BASIC = 'basic' + + +class BaseURLPrefix(str, Enum): + r"""You can access our API through the following URLs - Standard API Usage (Use the default API URL - https://api.jotform.com), For EU (Use the EU API URL - https://eu-api.jotform.com), For HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)""" + STANDARD = 'Standard' + EU = 'EU' + HIPAA = 'HIPAA' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class Basic: + API_ENDPOINT: Final[Optional[SourceJotformAPIEndpoint]] = dataclasses.field(default=SourceJotformAPIEndpoint.BASIC, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_endpoint'), 'exclude': lambda f: f is None }}) + url_prefix: Optional[BaseURLPrefix] = dataclasses.field(default=BaseURLPrefix.STANDARD, metadata={'dataclasses_json': { 'letter_case': 
utils.get_field_name('url_prefix'), 'exclude': lambda f: f is None }}) + r"""You can access our API through the following URLs - Standard API Usage (Use the default API URL - https://api.jotform.com), For EU (Use the EU API URL - https://eu-api.jotform.com), For HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)""" + + + + +class Jotform(str, Enum): + JOTFORM = 'jotform' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceJotform: + api_endpoint: APIEndpoint = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_endpoint') }}) + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + end_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Jotform] = dataclasses.field(default=Jotform.JOTFORM, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + + +APIEndpoint = Union[Basic, Enterprise] diff --git a/src/airbyte_api/models/source_kyve.py b/src/airbyte_api/models/source_kyve.py index cc0cfb31..6021b98e 100644 --- a/src/airbyte_api/models/source_kyve.py +++ b/src/airbyte_api/models/source_kyve.py @@ -19,10 +19,6 @@ class SourceKyve: r"""The IDs of the KYVE storage pool you want to archive. (Comma separated)""" start_ids: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_ids') }}) r"""The start-id defines, from which bundle id the pipeline should start to extract the data. (Comma separated)""" - max_pages: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('max_pages'), 'exclude': lambda f: f is None }}) - r"""The maximum amount of pages to go trough. Set to 'null' for all pages.""" - page_size: Optional[int] = dataclasses.field(default=100, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('page_size'), 'exclude': lambda f: f is None }}) - r"""The pagesize for pagination, smaller numbers are used in integration tests.""" SOURCE_TYPE: Final[Kyve] = dataclasses.field(default=Kyve.KYVE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) url_base: Optional[str] = dataclasses.field(default='https://api.kyve.network', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('url_base'), 'exclude': lambda f: f is None }}) r"""URL to the KYVE Chain API.""" diff --git a/src/airbyte_api/models/source_linkedin_ads.py b/src/airbyte_api/models/source_linkedin_ads.py index b2991e68..2bfbabe4 100644 --- a/src/airbyte_api/models/source_linkedin_ads.py +++ b/src/airbyte_api/models/source_linkedin_ads.py @@ -101,6 +101,8 @@ class SourceLinkedinAds: r"""Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. 
See the LinkedIn docs to locate these IDs.""" ad_analytics_reports: Optional[List[AdAnalyticsReportConfiguration]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ad_analytics_reports'), 'exclude': lambda f: f is None }}) credentials: Optional[SourceLinkedinAdsAuthentication] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) + lookback_window: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lookback_window'), 'exclude': lambda f: f is None }}) + r"""How far into the past to look for records. (in days)""" SOURCE_TYPE: Final[SourceLinkedinAdsLinkedinAds] = dataclasses.field(default=SourceLinkedinAdsLinkedinAds.LINKEDIN_ADS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_mailchimp.py b/src/airbyte_api/models/source_mailchimp.py index 2184b937..76b7d615 100644 --- a/src/airbyte_api/models/source_mailchimp.py +++ b/src/airbyte_api/models/source_mailchimp.py @@ -50,8 +50,6 @@ class SourceMailchimpMailchimp(str, Enum): @dataclasses.dataclass class SourceMailchimp: credentials: Optional[SourceMailchimpAuthentication] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) - data_center: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('data_center'), 'exclude': lambda f: f is None }}) - r"""Technical fields used to identify datacenter to send request to""" SOURCE_TYPE: Final[SourceMailchimpMailchimp] = dataclasses.field(default=SourceMailchimpMailchimp.MAILCHIMP, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The date from which you want to start syncing data for Incremental streams. Only records that have been created or modified since this date will be synced. If left blank, all data will by synced.""" diff --git a/src/airbyte_api/models/source_microsoft_onedrive.py b/src/airbyte_api/models/source_microsoft_onedrive.py index 244d41b3..0676fd77 100644 --- a/src/airbyte_api/models/source_microsoft_onedrive.py +++ b/src/airbyte_api/models/source_microsoft_onedrive.py @@ -243,8 +243,6 @@ class SourceMicrosoftOnedriveFileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" - primary_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary_key'), 'exclude': lambda f: f is None }}) - r"""The column or columns (for a composite key) that serves as the unique identifier of a record. 
If empty, the primary key will default to the parser's default primary key.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) r"""When enabled, syncs will not validate or structure records against the stream's schema.""" validation_policy: Optional[SourceMicrosoftOnedriveValidationPolicy] = dataclasses.field(default=SourceMicrosoftOnedriveValidationPolicy.EMIT_RECORD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('validation_policy'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_microsoft_sharepoint.py b/src/airbyte_api/models/source_microsoft_sharepoint.py index 3defd0d2..514e5ac4 100644 --- a/src/airbyte_api/models/source_microsoft_sharepoint.py +++ b/src/airbyte_api/models/source_microsoft_sharepoint.py @@ -255,8 +255,6 @@ class SourceMicrosoftSharepointFileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" - primary_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary_key'), 'exclude': lambda f: f is None }}) - r"""The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.""" recent_n_files_to_read_for_schema_discovery: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('recent_n_files_to_read_for_schema_discovery'), 'exclude': lambda f: f is None }}) r"""The number of recent files which will be used to discover the schema for this stream.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_mssql.py b/src/airbyte_api/models/source_mssql.py index 7a65aa00..cceaacf3 100644 --- a/src/airbyte_api/models/source_mssql.py +++ b/src/airbyte_api/models/source_mssql.py @@ -88,7 +88,7 @@ class SourceMssqlSchemasSslMethod(str, Enum): @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class Unencrypted: +class SourceMssqlUnencrypted: r"""Data transfer will not be encrypted.""" SSL_METHOD: Final[SourceMssqlSchemasSslMethod] = dataclasses.field(default=SourceMssqlSchemasSslMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_method') }}) @@ -182,6 +182,6 @@ class SourceMssql: UpdateMethod = Union[ReadChangesUsingChangeDataCaptureCDC, ScanChangesWithUserDefinedCursor] -SourceMssqlSSLMethod = Union[Unencrypted, SourceMssqlEncryptedTrustServerCertificate, SourceMssqlEncryptedVerifyCertificate] +SourceMssqlSSLMethod = Union[SourceMssqlUnencrypted, SourceMssqlEncryptedTrustServerCertificate, SourceMssqlEncryptedVerifyCertificate] SourceMssqlSSHTunnelMethod = Union[SourceMssqlNoTunnel, SourceMssqlSSHKeyAuthentication, SourceMssqlPasswordAuthentication]
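A quick hedged sketch of what the `Unencrypted` to `SourceMssqlUnencrypted` rename means for callers (assuming, as elsewhere, that the generated classes are re-exported from airbyte_api.models):

```python
from airbyte_api import models

# The union alias keeps its name and shape; only the concrete class behind the
# 'unencrypted' option is now namespaced, so direct references need updating.
ssl_method: models.SourceMssqlSSLMethod = models.SourceMssqlUnencrypted()  # was: models.Unencrypted()
```

diff --git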
a/src/airbyte_api/models/source_mysql.py b/src/airbyte_api/models/source_mysql.py index 823df582..bfe238f4 100644 --- a/src/airbyte_api/models/source_mysql.py +++ b/src/airbyte_api/models/source_mysql.py @@ -196,6 +196,8 @@ class SourceMysql: port: Optional[int] = dataclasses.field(default=3306, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('port'), 'exclude': lambda f: f is None }}) r"""The port to connect to.""" SOURCE_TYPE: Final[SourceMysqlMysql] = dataclasses.field(default=SourceMysqlMysql.MYSQL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + ssl: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl'), 'exclude': lambda f: f is None }}) + r"""Encrypt data using SSL.""" ssl_mode: Optional[SourceMysqlSSLModes] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ssl_mode'), 'exclude': lambda f: f is None }}) r"""SSL connection modes. Read more in the docs.""" tunnel_method: Optional[SourceMysqlSSHTunnelMethod] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('tunnel_method'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_nylas.py b/src/airbyte_api/models/source_nylas.py new file mode 100644 index 00000000..42cab4f8 --- /dev/null +++ b/src/airbyte_api/models/source_nylas.py @@ -0,0 +1,31 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class APIServer(str, Enum): + US = 'us' + EU = 'eu' + + +class Nylas(str, Enum): + NYLAS = 'nylas' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceNylas: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + api_server: APIServer = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_server') }}) + end_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('end_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Nylas] = dataclasses.field(default=Nylas.NYLAS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_oracle.py b/src/airbyte_api/models/source_oracle.py index 4319f541..ff13e9be 100644 --- a/src/airbyte_api/models/source_oracle.py +++ b/src/airbyte_api/models/source_oracle.py @@ -36,39 +36,52 @@ class ServiceName: -class SourceOracleEncryptionMethod(str, Enum): +class SourceOracleSchemasEncryptionEncryptionMethod(str, Enum): ENCRYPTED_VERIFY_CERTIFICATE = 'encrypted_verify_certificate' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class TLSEncryptedVerifyCertificate: +class SourceOracleTLSEncryptedVerifyCertificate: r"""Verify and use the certificate provided by the server.""" ssl_certificate: str = dataclasses.field(metadata={'dataclasses_json': { 
'letter_case': utils.get_field_name('ssl_certificate') }}) r"""Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.""" - ENCRYPTION_METHOD: Final[SourceOracleEncryptionMethod] = dataclasses.field(default=SourceOracleEncryptionMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }}) + ENCRYPTION_METHOD: Final[SourceOracleSchemasEncryptionEncryptionMethod] = dataclasses.field(default=SourceOracleSchemasEncryptionEncryptionMethod.ENCRYPTED_VERIFY_CERTIFICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }}) -class EncryptionAlgorithm(str, Enum): +class SourceOracleEncryptionAlgorithm(str, Enum): r"""This parameter defines what encryption algorithm is used.""" AES256 = 'AES256' RC4_56 = 'RC4_56' THREE_DES168 = '3DES168' -class EncryptionMethod(str, Enum): +class SourceOracleSchemasEncryptionMethod(str, Enum): CLIENT_NNE = 'client_nne' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass -class NativeNetworkEncryptionNNE: +class SourceOracleNativeNetworkEncryptionNNE: r"""The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.""" - encryption_algorithm: Optional[EncryptionAlgorithm] = dataclasses.field(default=EncryptionAlgorithm.AES256, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_algorithm'), 'exclude': lambda f: f is None }}) + encryption_algorithm: Optional[SourceOracleEncryptionAlgorithm] = dataclasses.field(default=SourceOracleEncryptionAlgorithm.AES256, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_algorithm'), 'exclude': lambda f: f is None }}) r"""This parameter defines what encryption algorithm is used.""" - ENCRYPTION_METHOD: Final[EncryptionMethod] = dataclasses.field(default=EncryptionMethod.CLIENT_NNE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }}) + ENCRYPTION_METHOD: Final[SourceOracleSchemasEncryptionMethod] = dataclasses.field(default=SourceOracleSchemasEncryptionMethod.CLIENT_NNE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }}) + + + + +class SourceOracleEncryptionMethod(str, Enum): + UNENCRYPTED = 'unencrypted' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceOracleUnencrypted: + r"""Data transfer will not be encrypted.""" + ENCRYPTION_METHOD: Final[SourceOracleEncryptionMethod] = dataclasses.field(default=SourceOracleEncryptionMethod.UNENCRYPTED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption_method') }}) @@ -138,14 +151,14 @@ class SourceOracleNoTunnel: @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceOracle: - encryption: Encryption = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption') }}) - r"""The encryption method with is used when communicating with the database.""" host: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('host') }}) r"""Hostname of the database.""" username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) r"""The username which is used to access the database.""" connection_data: Optional[ConnectBy] = 
dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('connection_data'), 'exclude': lambda f: f is None }}) r"""Connect data that will be used for DB connection""" + encryption: Optional[SourceOracleEncryption] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encryption'), 'exclude': lambda f: f is None }}) + r"""The encryption method which is used when communicating with the database.""" jdbc_url_params: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('jdbc_url_params'), 'exclude': lambda f: f is None }}) r"""Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).""" password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) @@ -166,6 +179,6 @@ class SourceOracle: ConnectBy = Union[ServiceName, SystemIDSID] -Encryption = Union[NativeNetworkEncryptionNNE, TLSEncryptedVerifyCertificate] +SourceOracleEncryption = Union[SourceOracleUnencrypted, SourceOracleNativeNetworkEncryptionNNE, SourceOracleTLSEncryptedVerifyCertificate] SourceOracleSSHTunnelMethod = Union[SourceOracleNoTunnel, SourceOracleSSHKeyAuthentication, SourceOraclePasswordAuthentication] diff --git a/src/airbyte_api/models/source_picqer.py b/src/airbyte_api/models/source_picqer.py new file mode 100644 index 00000000..ff66e38a --- /dev/null +++ b/src/airbyte_api/models/source_picqer.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Picqer(str, Enum): + PICQER = 'picqer' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePicqer: + organization_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization_name') }}) + r"""The organization name which is used to log in to Picqer""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Picqer] = dataclasses.field(default=Picqer.PICQER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
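The SourceOracle change above makes `encryption` optional (defaulting to None) and widens the union with an explicit unencrypted variant. A minimal sketch, assuming airbyte_api.models re-exports the renamed classes; host and username are the only required fields shown in this hunk:

```python
from airbyte_api import models

source = models.SourceOracle(
    host='db.internal.example.com',  # hypothetical hostname
    username='airbyte',
    password='...',
    # Encryption may now be omitted entirely, or set to any of the three
    # SourceOracleEncryption variants, e.g. native network encryption:
    encryption=models.SourceOracleNativeNetworkEncryptionNNE(
        encryption_algorithm=models.SourceOracleEncryptionAlgorithm.AES256,
    ),
)
```

diff --git a/src/airbyte_api/models/source_piwik.py b/src/airbyte_api/models/source_piwik.py new file mode 100644 index 00000000..d301a2d9 --- /dev/null +++ b/src/airbyte_api/models/source_piwik.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 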
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Piwik(str, Enum): + PIWIK = 'piwik' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourcePiwik: + client_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_id') }}) + client_secret: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('client_secret') }}) + organization_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization_id') }}) + r"""The organization ID appearing in the URL of your Piwik website""" + SOURCE_TYPE: Final[Piwik] = dataclasses.field(default=Piwik.PIWIK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_productboard.py b/src/airbyte_api/models/source_productboard.py new file mode 100644 index 00000000..a33db2d5 --- /dev/null +++ b/src/airbyte_api/models/source_productboard.py @@ -0,0 +1,25 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Productboard(str, Enum): + PRODUCTBOARD = 'productboard' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceProductboard: + access_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('access_token') }}) + r"""Your Productboard access token. See https://developer.productboard.com/reference/authentication for steps to generate one.""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Productboard] = dataclasses.field(default=Productboard.PRODUCTBOARD, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
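Construction sketches for the two new models above, under the same assumption that airbyte_api.models re-exports them:

```python
from datetime import datetime, timezone

from airbyte_api import models

piwik = models.SourcePiwik(
    client_id='...',
    client_secret='...',
    organization_id='acme',  # the organization ID as it appears in your Piwik URL
)
productboard = models.SourceProductboard(
    access_token='...',
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
)
```

diff --git a/src/airbyte_api/models/source_productive.py b/src/airbyte_api/models/source_productive.py new file mode 100644 index 00000000..6fca4b53 --- /dev/null +++ b/src/airbyte_api/models/source_productive.py @@ -0,0 +1,23 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 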
DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class Productive(str, Enum): + PRODUCTIVE = 'productive' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceProductive: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + organization_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('organization_id') }}) + r"""The organization ID, which can be found on the `https://app.productive.io/xxxx-xxxx/settings/api-integrations` page""" + SOURCE_TYPE: Final[Productive] = dataclasses.field(default=Productive.PRODUCTIVE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_s3.py b/src/airbyte_api/models/source_s3.py index b61be6c1..ac499043 100644 --- a/src/airbyte_api/models/source_s3.py +++ b/src/airbyte_api/models/source_s3.py @@ -10,126 +10,11 @@ from typing import Final, List, Optional, Union -class SourceS3SchemasFormatFiletype(str, Enum): - JSONL = 'jsonl' - - -class UnexpectedFieldBehavior(str, Enum): - r"""How JSON fields outside of explicit_schema (if given) are treated. Check PyArrow documentation for details""" - IGNORE = 'ignore' - INFER = 'infer' - ERROR = 'error' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Jsonl: - r"""This connector uses PyArrow for JSON Lines (jsonl) file parsing.""" - block_size: Optional[int] = dataclasses.field(default=0, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('block_size'), 'exclude': lambda f: f is None }}) - r"""The chunk size in bytes to process at a time in memory from each file. If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.""" - FILETYPE: Final[Optional[SourceS3SchemasFormatFiletype]] = dataclasses.field(default=SourceS3SchemasFormatFiletype.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) - newlines_in_values: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('newlines_in_values'), 'exclude': lambda f: f is None }}) - r"""Whether newline characters are allowed in JSON values. Turning this on may affect performance. Leave blank to default to False.""" - unexpected_field_behavior: Optional[UnexpectedFieldBehavior] = dataclasses.field(default=UnexpectedFieldBehavior.INFER, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('unexpected_field_behavior'), 'exclude': lambda f: f is None }}) - r"""How JSON fields outside of explicit_schema (if given) are treated. 
Check PyArrow documentation for details""" - - - - -class SourceS3SchemasFiletype(str, Enum): - AVRO = 'avro' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Avro: - r"""This connector utilises fastavro for Avro parsing.""" - FILETYPE: Final[Optional[SourceS3SchemasFiletype]] = dataclasses.field(default=SourceS3SchemasFiletype.AVRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) - - - - -class SourceS3Filetype(str, Enum): - PARQUET = 'parquet' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Parquet: - r"""This connector utilises PyArrow (Apache Arrow) for Parquet parsing.""" - batch_size: Optional[int] = dataclasses.field(default=65536, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('batch_size'), 'exclude': lambda f: f is None }}) - r"""Maximum number of records per batch read from the input files. Batches may be smaller if there aren’t enough rows in the file. This option can help avoid out-of-memory errors if your data is particularly wide.""" - buffer_size: Optional[int] = dataclasses.field(default=2, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('buffer_size'), 'exclude': lambda f: f is None }}) - r"""Perform read buffering when deserializing individual column chunks. By default every group column will be loaded fully to memory. This option can help avoid out-of-memory errors if your data is particularly wide.""" - columns: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('columns'), 'exclude': lambda f: f is None }}) - r"""If you only want to sync a subset of the columns from the file(s), add the columns you want here as a comma-delimited list. Leave it empty to sync all columns.""" - FILETYPE: Final[Optional[SourceS3Filetype]] = dataclasses.field(default=SourceS3Filetype.PARQUET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) - - - - -class Filetype(str, Enum): - CSV = 'csv' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Csv: - r"""This connector utilises PyArrow (Apache Arrow) for CSV parsing.""" - additional_reader_options: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('additional_reader_options'), 'exclude': lambda f: f is None }}) - r"""Optionally add a valid JSON string here to provide additional options to the csv reader. Mappings must correspond to options detailed here. 'column_types' is used internally to handle schema so overriding that would likely cause problems.""" - advanced_options: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('advanced_options'), 'exclude': lambda f: f is None }}) - r"""Optionally add a valid JSON string here to provide additional Pyarrow ReadOptions. Specify 'column_names' here if your CSV doesn't have header, or if you want to use custom column names. 'block_size' and 'encoding' are already used above, specify them again here will override the values above.""" - block_size: Optional[int] = dataclasses.field(default=10000, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('block_size'), 'exclude': lambda f: f is None }}) - r"""The chunk size in bytes to process at a time in memory from each file. 
If your data is particularly wide and failing during schema detection, increasing this should solve it. Beware of raising this too high as you could hit OOM errors.""" - delimiter: Optional[str] = dataclasses.field(default=',', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('delimiter'), 'exclude': lambda f: f is None }}) - r"""The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.""" - double_quote: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('double_quote'), 'exclude': lambda f: f is None }}) - r"""Whether two quotes in a quoted CSV value denote a single quote in the data.""" - encoding: Optional[str] = dataclasses.field(default='utf8', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('encoding'), 'exclude': lambda f: f is None }}) - r"""The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.""" - escape_char: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('escape_char'), 'exclude': lambda f: f is None }}) - r"""The character used for escaping special characters. To disallow escaping, leave this field blank.""" - FILETYPE: Final[Optional[Filetype]] = dataclasses.field(default=Filetype.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) - infer_datatypes: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('infer_datatypes'), 'exclude': lambda f: f is None }}) - r"""Configures whether a schema for the source should be inferred from the current data or not. If set to false and a custom schema is set, then the manually enforced schema is used. If a schema is not manually set, and this is set to false, then all fields will be read as strings""" - newlines_in_values: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('newlines_in_values'), 'exclude': lambda f: f is None }}) - r"""Whether newline characters are allowed in CSV values. Turning this on may affect performance. Leave blank to default to False.""" - quote_char: Optional[str] = dataclasses.field(default='"', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('quote_char'), 'exclude': lambda f: f is None }}) - r"""The character used for quoting CSV values. To disallow quoting, make this field blank.""" - - - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class S3AmazonWebServices: - r"""Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services""" - aws_access_key_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_access_key_id'), 'exclude': lambda f: f is None }}) - r"""In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. 
If accessing publicly available data, this field is not necessary.""" - aws_secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_access_key'), 'exclude': lambda f: f is None }}) - r"""In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.""" - bucket: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('bucket'), 'exclude': lambda f: f is None }}) - r"""Name of the S3 bucket where the file(s) exist.""" - endpoint: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint'), 'exclude': lambda f: f is None }}) - r"""Endpoint to an S3 compatible service. Leave empty to use AWS.""" - path_prefix: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('path_prefix'), 'exclude': lambda f: f is None }}) - r"""By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate.""" - region_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region_name'), 'exclude': lambda f: f is None }}) - r"""AWS region where the S3 bucket is located. If not provided, the region will be determined automatically.""" - role_arn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn'), 'exclude': lambda f: f is None }}) - r"""Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. Set the External ID to the Airbyte workspace ID, which can be found in the URL of this page.""" - start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) - r"""UTC date and time in the format 2017-01-25T00:00:00Z. 
Any file modified before this date will not be replicated.""" - - - - class SourceS3S3(str, Enum): S3 = 's3' -class SourceS3SchemasStreamsFormatFormat5Filetype(str, Enum): +class SourceS3SchemasStreamsFormatFormatFiletype(str, Enum): UNSTRUCTURED = 'unstructured' @@ -158,7 +43,7 @@ class SourceS3ParsingStrategy(str, Enum): @dataclasses.dataclass class SourceS3UnstructuredDocumentFormat: r"""Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.""" - FILETYPE: Final[Optional[SourceS3SchemasStreamsFormatFormat5Filetype]] = dataclasses.field(default=SourceS3SchemasStreamsFormatFormat5Filetype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + FILETYPE: Final[Optional[SourceS3SchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceS3SchemasStreamsFormatFormatFiletype.UNSTRUCTURED, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) processing: Optional[SourceS3Processing] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('processing'), 'exclude': lambda f: f is None }}) r"""Processing configuration""" skip_unprocessable_files: Optional[bool] = dataclasses.field(default=True, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('skip_unprocessable_files'), 'exclude': lambda f: f is None }}) @@ -169,7 +54,7 @@ class SourceS3UnstructuredDocumentFormat: -class SourceS3SchemasStreamsFormatFormat4Filetype(str, Enum): +class SourceS3SchemasStreamsFormatFiletype(str, Enum): PARQUET = 'parquet' @@ -178,24 +63,24 @@ class SourceS3SchemasStreamsFormatFormat4Filetype(str, Enum): class SourceS3ParquetFormat: decimal_as_float: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('decimal_as_float'), 'exclude': lambda f: f is None }}) r"""Whether to convert decimal fields to floats. 
There is a loss of precision when converting decimals to floats, so this is not recommended.""" - FILETYPE: Final[Optional[SourceS3SchemasStreamsFormatFormat4Filetype]] = dataclasses.field(default=SourceS3SchemasStreamsFormatFormat4Filetype.PARQUET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + FILETYPE: Final[Optional[SourceS3SchemasStreamsFormatFiletype]] = dataclasses.field(default=SourceS3SchemasStreamsFormatFiletype.PARQUET, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) -class SourceS3SchemasStreamsFormatFormatFiletype(str, Enum): +class SourceS3SchemasStreamsFiletype(str, Enum): JSONL = 'jsonl' @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceS3JsonlFormat: - FILETYPE: Final[Optional[SourceS3SchemasStreamsFormatFormatFiletype]] = dataclasses.field(default=SourceS3SchemasStreamsFormatFormatFiletype.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + FILETYPE: Final[Optional[SourceS3SchemasStreamsFiletype]] = dataclasses.field(default=SourceS3SchemasStreamsFiletype.JSONL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) -class SourceS3SchemasStreamsFormatFiletype(str, Enum): +class SourceS3SchemasFiletype(str, Enum): CSV = 'csv' @@ -237,12 +122,6 @@ class SourceS3FromCSV: -class SourceS3InferenceType(str, Enum): - r"""How to infer the types of the columns. If none, inference default to strings.""" - NONE = 'None' - PRIMITIVE_TYPES_ONLY = 'Primitive Types Only' - - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceS3CSVFormat: @@ -256,13 +135,11 @@ class SourceS3CSVFormat: r"""The character used for escaping special characters. To disallow escaping, leave this field blank.""" false_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('false_values'), 'exclude': lambda f: f is None }}) r"""A set of case-sensitive strings that should be interpreted as false values.""" - FILETYPE: Final[Optional[SourceS3SchemasStreamsFormatFiletype]] = dataclasses.field(default=SourceS3SchemasStreamsFormatFiletype.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + FILETYPE: Final[Optional[SourceS3SchemasFiletype]] = dataclasses.field(default=SourceS3SchemasFiletype.CSV, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) header_definition: Optional[SourceS3CSVHeaderDefinition] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('header_definition'), 'exclude': lambda f: f is None }}) r"""How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file.
If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.""" ignore_errors_on_fields_mismatch: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ignore_errors_on_fields_mismatch'), 'exclude': lambda f: f is None }}) r"""Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.""" - inference_type: Optional[SourceS3InferenceType] = dataclasses.field(default=SourceS3InferenceType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('inference_type'), 'exclude': lambda f: f is None }}) - r"""How to infer the types of the columns. If none, inference default to strings.""" null_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('null_values'), 'exclude': lambda f: f is None }}) r"""A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.""" quote_char: Optional[str] = dataclasses.field(default='"', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('quote_char'), 'exclude': lambda f: f is None }}) @@ -279,7 +156,7 @@ class SourceS3CSVFormat: -class SourceS3SchemasStreamsFiletype(str, Enum): +class SourceS3Filetype(str, Enum): AVRO = 'avro' @@ -288,7 +165,7 @@ class SourceS3SchemasStreamsFiletype(str, Enum): class SourceS3AvroFormat: double_as_string: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('double_as_string'), 'exclude': lambda f: f is None }}) r"""Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers.""" - FILETYPE: Final[Optional[SourceS3SchemasStreamsFiletype]] = dataclasses.field(default=SourceS3SchemasStreamsFiletype.AVRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) + FILETYPE: Final[Optional[SourceS3Filetype]] = dataclasses.field(default=SourceS3Filetype.AVRO, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('filetype'), 'exclude': lambda f: f is None }}) @@ -313,10 +190,6 @@ class SourceS3FileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.""" - legacy_prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('legacy_prefix'), 'exclude': lambda f: f is None }}) - r"""The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.""" - primary_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary_key'), 'exclude': lambda f: f is None }}) - r"""The column or columns (for a composite key) that serves as the unique identifier of a record.
If empty, the primary key will default to the parser's default primary key.""" recent_n_files_to_read_for_schema_discovery: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('recent_n_files_to_read_for_schema_discovery'), 'exclude': lambda f: f is None }}) r"""The number of recent files which will be used to discover the schema for this stream.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) @@ -341,30 +214,18 @@ class SourceS3: r"""In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.""" aws_secret_access_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('aws_secret_access_key'), 'exclude': lambda f: f is None }}) r"""In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.""" - dataset: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dataset'), 'exclude': lambda f: f is None }}) - r"""Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.""" endpoint: Optional[str] = dataclasses.field(default='', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('endpoint'), 'exclude': lambda f: f is None }}) - r"""Endpoint to an S3 compatible service. Leave empty to use AWS. The custom endpoint must be secure, but the 'https' prefix is not required.""" - format: Optional[SourceS3FileFormat] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('format'), 'exclude': lambda f: f is None }}) - r"""Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate""" - path_pattern: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('path_pattern'), 'exclude': lambda f: f is None }}) - r"""Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.""" - provider: Optional[S3AmazonWebServices] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('provider'), 'exclude': lambda f: f is None }}) - r"""Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services""" + r"""Endpoint to an S3 compatible service.
Leave empty to use AWS.""" region_name: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('region_name'), 'exclude': lambda f: f is None }}) r"""AWS region where the S3 bucket is located. If not provided, the region will be determined automatically.""" role_arn: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('role_arn'), 'exclude': lambda f: f is None }}) r"""Specifies the Amazon Resource Name (ARN) of an IAM role that you want to use to perform operations requested using this profile. Set the External ID to the Airbyte workspace ID, which can be found in the URL of this page.""" - schema: Optional[str] = dataclasses.field(default='{}', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schema'), 'exclude': lambda f: f is None }}) - r"""Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { \\"column\\" : \\"type\\" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.""" SOURCE_TYPE: Final[SourceS3S3] = dataclasses.field(default=SourceS3S3.S3, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.""" -SourceS3FileFormat = Union[Csv, Parquet, Avro, Jsonl] - SourceS3Processing = Union[SourceS3Local] SourceS3CSVHeaderDefinition = Union[SourceS3FromCSV, SourceS3Autogenerated, SourceS3UserProvided]
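For migration context: the deprecated top-level fields removed above (dataset, format, path_pattern, provider, schema) map onto the remaining bucket-level fields plus per-stream settings, per the deprecation notices in the removed docstrings. Below is a minimal, hypothetical sketch of a new-style configuration; the `streams` and `name` parameters are inferred from the `streams.name`/`streams.format`/`streams.globs`/`streams.input_schema` references in those notices rather than shown in this hunk, so verify field names and required arguments against the generated model.

from airbyte_api import models

# Hypothetical new-style S3 source configuration; each keyword mirrors one
# of the deprecated fields removed in this hunk (see comments).
s3_source = models.SourceS3(
    bucket='my-data-bucket',                    # was provider.bucket
    aws_access_key_id='AKIA...',                # was provider.aws_access_key_id
    aws_secret_access_key='...',                # was provider.aws_secret_access_key
    endpoint='',                                # empty string selects AWS itself
    streams=[                                   # assumed parameter, per the deprecation notes
        models.SourceS3FileBasedStreamConfig(
            name='daily_events',                # replaces the removed top-level `dataset`
            format=models.SourceS3CSVFormat(),  # replaces the removed top-level `format`
            globs=['myFolder/**/*.csv'],        # replaces the removed `path_pattern`
        ),
    ],
)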
diff --git a/src/airbyte_api/models/source_senseforce.py b/src/airbyte_api/models/source_senseforce.py index c0a45055..2e2453f2 100644 --- a/src/airbyte_api/models/source_senseforce.py +++ b/src/airbyte_api/models/source_senseforce.py @@ -6,7 +6,7 @@ from dataclasses_json import Undefined, dataclass_json from datetime import date from enum import Enum -from typing import Final, Optional +from typing import Final class Senseforce(str, Enum): @@ -24,8 +24,6 @@ class SourceSenseforce: r"""The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you want to synchronize needs to be implemented as a separate airbyte source).""" start_date: date = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.dateisoformat(False), 'decoder': utils.datefromisoformat }}) r"""UTC date and time in the format 2017-01-25. Only data with \\"Timestamp\\" after this date will be replicated. Important note: This start date must be set to the first day of where your dataset provides data. If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later""" - slice_range: Optional[int] = dataclasses.field(default=10, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('slice_range'), 'exclude': lambda f: f is None }}) - r"""The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted and the more likely one could run into rate limites. Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return data - meaning it could take in effect longer than with more smaller time slices. If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might change the setting to 10 or more.""" SOURCE_TYPE: Final[Senseforce] = dataclasses.field(default=Senseforce.SENSEFORCE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) diff --git a/src/airbyte_api/models/source_sftp_bulk.py b/src/airbyte_api/models/source_sftp_bulk.py index c8fe368e..a9bd8add 100644 --- a/src/airbyte_api/models/source_sftp_bulk.py +++ b/src/airbyte_api/models/source_sftp_bulk.py @@ -192,12 +192,6 @@ class SourceSftpBulkFromCSV: -class SourceSftpBulkInferenceType(str, Enum): - r"""How to infer the types of the columns. If none, inference default to strings.""" - NONE = 'None' - PRIMITIVE_TYPES_ONLY = 'Primitive Types Only' - - @dataclass_json(undefined=Undefined.EXCLUDE) @dataclasses.dataclass class SourceSftpBulkCSVFormat: @@ -216,8 +210,6 @@ class SourceSftpBulkCSVFormat: r"""How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.""" ignore_errors_on_fields_mismatch: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ignore_errors_on_fields_mismatch'), 'exclude': lambda f: f is None }}) r"""Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema.""" - inference_type: Optional[SourceSftpBulkInferenceType] = dataclasses.field(default=SourceSftpBulkInferenceType.NONE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('inference_type'), 'exclude': lambda f: f is None }}) - r"""How to infer the types of the columns. If none, inference default to strings.""" null_values: Optional[List[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('null_values'), 'exclude': lambda f: f is None }}) r"""A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.""" quote_char: Optional[str] = dataclasses.field(default='"', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('quote_char'), 'exclude': lambda f: f is None }}) @@ -268,10 +260,6 @@ class SourceSftpBulkFileBasedStreamConfig: r"""The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.""" input_schema: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('input_schema'), 'exclude': lambda f: f is None }}) r"""The schema that will be used to validate records extracted from the file.
This will override the stream schema that is auto-detected from incoming files.""" - legacy_prefix: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('legacy_prefix'), 'exclude': lambda f: f is None }}) - r"""The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.""" - primary_key: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('primary_key'), 'exclude': lambda f: f is None }}) - r"""The column or columns (for a composite key) that serves as the unique identifier of a record. If empty, the primary key will default to the parser's default primary key.""" recent_n_files_to_read_for_schema_discovery: Optional[int] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('recent_n_files_to_read_for_schema_discovery'), 'exclude': lambda f: f is None }}) r"""The number of recent files which will be used to discover the schema for this stream.""" schemaless: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('schemaless'), 'exclude': lambda f: f is None }}) diff --git a/src/airbyte_api/models/source_shortcut.py b/src/airbyte_api/models/source_shortcut.py new file mode 100644 index 00000000..8246ba2f --- /dev/null +++ b/src/airbyte_api/models/source_shortcut.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Shortcut(str, Enum): + SHORTCUT = 'shortcut' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceShortcut: + api_key_2: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key_2') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + query: Optional[str] = dataclasses.field(default='title:Our first Epic', metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('query'), 'exclude': lambda f: f is None }}) + r"""Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators`""" + SOURCE_TYPE: Final[Shortcut] = dataclasses.field(default=Shortcut.SHORTCUT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
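Since source_shortcut.py is new in this release, a short construction sketch may help; it assumes `models` re-exports `SourceShortcut` (consistent with the sourceconfiguration.py import added later in this patch). Both required fields come straight from the dataclass above, and `query` keeps its documented default when omitted.

from datetime import datetime, timezone

from airbyte_api import models

# Only the two required fields are passed; SOURCE_TYPE defaults to 'shortcut'.
shortcut_source = models.SourceShortcut(
    api_key_2='your-shortcut-api-token',  # placeholder credential
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
)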
diff --git a/src/airbyte_api/models/source_smartsheets.py b/src/airbyte_api/models/source_smartsheets.py index 1a1a0ca9..a4244c9d 100644 --- a/src/airbyte_api/models/source_smartsheets.py +++ b/src/airbyte_api/models/source_smartsheets.py @@ -79,8 +79,6 @@ class SourceSmartsheets: metadata_fields: Optional[List[Validenums]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('metadata_fields'), 'exclude': lambda f: f is None }}) r"""A list of available columns from which metadata can be pulled.""" SOURCE_TYPE: Final[SourceSmartsheetsSmartsheets] = dataclasses.field(default=SourceSmartsheetsSmartsheets.SMARTSHEETS, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - start_datetime: Optional[datetime] = dataclasses.field(default=dateutil.parser.isoparse('2020-01-01T00:00:00+00:00'), metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_datetime'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) - r"""Only rows modified after this date/time will be replicated. This should be an ISO 8601 string, for instance: `2000-01-01T13:00:00`""" diff --git a/src/airbyte_api/models/source_survicate.py b/src/airbyte_api/models/source_survicate.py new file mode 100644 index 00000000..908a67b9 --- /dev/null +++ b/src/airbyte_api/models/source_survicate.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final + + +class Survicate(str, Enum): + SURVICATE = 'survicate' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceSurvicate: + api_key: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_key') }}) + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + SOURCE_TYPE: Final[Survicate] = dataclasses.field(default=Survicate.SURVICATE, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_teamwork.py b/src/airbyte_api/models/source_teamwork.py new file mode 100644 index 00000000..965a7909 --- /dev/null +++ b/src/airbyte_api/models/source_teamwork.py @@ -0,0 +1,27 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +import dateutil.parser +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from datetime import datetime +from enum import Enum +from typing import Final, Optional + + +class Teamwork(str, Enum): + TEAMWORK = 'teamwork' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceTeamwork: + site_name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('site_name') }}) + r"""The Teamwork site name that appears in the site URL""" + start_date: datetime = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(False), 'decoder': dateutil.parser.isoparse }}) + username: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('username') }}) + password: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password'), 'exclude': lambda f: f is None }}) + SOURCE_TYPE: Final[Teamwork] = dataclasses.field(default=Teamwork.TEAMWORK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + +
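The new source_survicate.py and source_teamwork.py models follow the same shape. A hedged construction sketch for Teamwork, again assuming the `models` re-export; `password` is the only optional field in the dataclass above.

from datetime import datetime, timezone

from airbyte_api import models

teamwork_source = models.SourceTeamwork(
    site_name='mycompany',  # the subdomain from https://mycompany.teamwork.com
    start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
    username='user@example.com',
    password='app-password',  # optional; placeholder credential
)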
diff --git a/src/airbyte_api/models/source_when_i_work.py b/src/airbyte_api/models/source_when_i_work.py new file mode 100644 index 00000000..c3118a92 --- /dev/null +++ b/src/airbyte_api/models/source_when_i_work.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +import dataclasses +from airbyte_api import utils +from dataclasses_json import Undefined, dataclass_json +from enum import Enum +from typing import Final + + +class WhenIWork(str, Enum): + WHEN_I_WORK = 'when-i-work' + + +@dataclass_json(undefined=Undefined.EXCLUDE) +@dataclasses.dataclass +class SourceWhenIWork: + email: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('email') }}) + r"""Email of your When I Work account""" + password: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('password') }}) + r"""Password for your When I Work account""" + SOURCE_TYPE: Final[WhenIWork] = dataclasses.field(default=WhenIWork.WHEN_I_WORK, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) + + diff --git a/src/airbyte_api/models/source_zendesk_sell.py b/src/airbyte_api/models/source_zendesk_sell.py deleted file mode 100644 index 858b7842..00000000 --- a/src/airbyte_api/models/source_zendesk_sell.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from airbyte_api import utils -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from typing import Final - - -class ZendeskSell(str, Enum): - ZENDESK_SELL = 'zendesk-sell' - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class SourceZendeskSell: - api_token: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('api_token') }}) - r"""The API token for authenticating to Zendesk Sell""" - SOURCE_TYPE: Final[ZendeskSell] = dataclasses.field(default=ZendeskSell.ZENDESK_SELL, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) - - diff --git a/src/airbyte_api/models/source_zendesk_support.py b/src/airbyte_api/models/source_zendesk_support.py index a891010e..db845e76 100644 --- a/src/airbyte_api/models/source_zendesk_support.py +++ b/src/airbyte_api/models/source_zendesk_support.py @@ -57,8 +57,6 @@ class SourceZendeskSupport: r"""This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain.""" credentials: Optional[SourceZendeskSupportAuthentication] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('credentials'), 'exclude': lambda f: f is None }}) r"""Zendesk allows two authentication methods.
We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users.""" - ignore_pagination: Optional[bool] = dataclasses.field(default=False, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ignore_pagination'), 'exclude': lambda f: f is None }}) - r"""Makes each stream read a single page of data.""" SOURCE_TYPE: Final[SourceZendeskSupportZendeskSupport] = dataclasses.field(default=SourceZendeskSupportZendeskSupport.ZENDESK_SUPPORT, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('sourceType') }}) start_date: Optional[datetime] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('start_date'), 'encoder': utils.datetimeisoformat(True), 'decoder': dateutil.parser.isoparse, 'exclude': lambda f: f is None }}) r"""The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated.""" diff --git a/src/airbyte_api/models/sourceconfiguration.py b/src/airbyte_api/models/sourceconfiguration.py index 39f1054c..091fda2e 100644 --- a/src/airbyte_api/models/sourceconfiguration.py +++ b/src/airbyte_api/models/sourceconfiguration.py @@ -1,6 +1,7 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from __future__ import annotations +from .source_7shifts import Source7shifts from .source_aha import SourceAha from .source_airbyte import SourceAirbyte from .source_aircall import SourceAircall @@ -10,6 +11,8 @@ from .source_amazon_sqs import SourceAmazonSqs from .source_amplitude import SourceAmplitude from .source_apify_dataset import SourceApifyDataset +from .source_appcues import SourceAppcues +from .source_appfigures import SourceAppfigures from .source_appfollow import SourceAppfollow from .source_asana import SourceAsana from .source_auth0 import SourceAuth0 @@ -20,13 +23,20 @@ from .source_basecamp import SourceBasecamp from .source_bigquery import SourceBigquery from .source_bing_ads import SourceBingAds +from .source_bitly import SourceBitly from .source_braintree import SourceBraintree from .source_braze import SourceBraze from .source_breezy_hr import SourceBreezyHr +from .source_brevo import SourceBrevo +from .source_buildkite import SourceBuildkite +from .source_buzzsprout import SourceBuzzsprout from .source_calendly import SourceCalendly +from .source_canny import SourceCanny from .source_cart import SourceCart +from .source_chameleon import SourceChameleon from .source_chargebee import SourceChargebee from .source_chartmogul import SourceChartmogul +from .source_cimis import SourceCimis from .source_clazar import SourceClazar from .source_clickhouse import SourceClickhouse from .source_clickup_api import SourceClickupAPI @@ -47,9 +57,9 @@ from .source_dockerhub import SourceDockerhub from .source_dremio import SourceDremio from .source_dynamodb import SourceDynamodb -from .source_e2e_test_cloud import SourceE2eTestCloud from .source_emailoctopus import SourceEmailoctopus from .source_exchange_rates import SourceExchangeRates +from .source_ezofficeinventory import SourceEzofficeinventory from .source_facebook_marketing import SourceFacebookMarketing from .source_faker import SourceFaker from .source_fauna import SourceFauna @@ -59,6 +69,7 @@ from .source_freshcaller import SourceFreshcaller from .source_freshdesk import SourceFreshdesk from .source_freshsales import SourceFreshsales +from .source_front import SourceFront from .source_gainsight_px import 
SourceGainsightPx from .source_gcs import SourceGcs from .source_getlago import SourceGetlago @@ -74,11 +85,14 @@ from .source_google_pagespeed_insights import SourceGooglePagespeedInsights from .source_google_search_console import SourceGoogleSearchConsole from .source_google_sheets import SourceGoogleSheets +from .source_google_tasks import SourceGoogleTasks from .source_google_webfonts import SourceGoogleWebfonts from .source_greenhouse import SourceGreenhouse from .source_gridly import SourceGridly +from .source_guru import SourceGuru from .source_hardcoded_records import SourceHardcodedRecords from .source_harvest import SourceHarvest +from .source_height import SourceHeight from .source_hibob import SourceHibob from .source_high_level import SourceHighLevel from .source_hubplanner import SourceHubplanner @@ -90,6 +104,7 @@ from .source_ip2whois import SourceIp2whois from .source_iterable import SourceIterable from .source_jira import SourceJira +from .source_jotform import SourceJotform from .source_k6_cloud import SourceK6Cloud from .source_klarna import SourceKlarna from .source_klaviyo import SourceKlaviyo @@ -121,6 +136,7 @@ from .source_netsuite import SourceNetsuite from .source_northpass_lms import SourceNorthpassLms from .source_notion import SourceNotion +from .source_nylas import SourceNylas from .source_nytimes import SourceNytimes from .source_okta import SourceOkta from .source_omnisend import SourceOmnisend @@ -136,8 +152,10 @@ from .source_pennylane import SourcePennylane from .source_persistiq import SourcePersistiq from .source_pexels_api import SourcePexelsAPI +from .source_picqer import SourcePicqer from .source_pinterest import SourcePinterest from .source_pipedrive import SourcePipedrive +from .source_piwik import SourcePiwik from .source_planhat import SourcePlanhat from .source_pocket import SourcePocket from .source_pokeapi import SourcePokeapi @@ -146,6 +164,8 @@ from .source_posthog import SourcePosthog from .source_postmarkapp import SourcePostmarkapp from .source_prestashop import SourcePrestashop +from .source_productboard import SourceProductboard +from .source_productive import SourceProductive from .source_pypi import SourcePypi from .source_qualaroo import SourceQualaroo from .source_railz import SourceRailz @@ -172,6 +192,7 @@ from .source_sftp import SourceSftp from .source_sftp_bulk import SourceSftpBulk from .source_shopify import SourceShopify +from .source_shortcut import SourceShortcut from .source_shortio import SourceShortio from .source_slack import SourceSlack from .source_smaily import SourceSmaily @@ -186,6 +207,8 @@ from .source_stripe import SourceStripe from .source_survey_sparrow import SourceSurveySparrow from .source_surveymonkey import SourceSurveymonkey +from .source_survicate import SourceSurvicate +from .source_teamwork import SourceTeamwork from .source_tempo import SourceTempo from .source_the_guardian_api import SourceTheGuardianAPI from .source_tiktok_marketing import SourceTiktokMarketing @@ -199,6 +222,7 @@ from .source_us_census import SourceUsCensus from .source_vantage import SourceVantage from .source_webflow import SourceWebflow +from .source_when_i_work import SourceWhenIWork from .source_whisky_hunter import SourceWhiskyHunter from .source_wikipedia_pageviews import SourceWikipediaPageviews from .source_woocommerce import SourceWoocommerce @@ -207,7 +231,6 @@ from .source_yotpo import SourceYotpo from .source_youtube_analytics import SourceYoutubeAnalytics from .source_zendesk_chat import SourceZendeskChat -from 
.source_zendesk_sell import SourceZendeskSell from .source_zendesk_sunshine import SourceZendeskSunshine from .source_zendesk_support import SourceZendeskSupport from .source_zendesk_talk import SourceZendeskTalk @@ -216,4 +239,4 @@ from .source_zoom import SourceZoom from typing import Union -SourceConfiguration = Union[SourceAha, SourceAirbyte, SourceAircall, SourceAirtable, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppfollow, SourceAsana, SourceAuth0, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBambooHr, SourceBasecamp, SourceBigquery, SourceBingAds, SourceBraintree, SourceBraze, SourceBreezyHr, SourceCalendly, SourceCart, SourceChargebee, SourceChartmogul, SourceClazar, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceCloseCom, SourceCoda, SourceCoinAPI, SourceCoinmarketcap, SourceConfigcat, SourceConfluence, SourceConvex, SourceCustomerIo, SourceDatadog, SourceDatascope, SourceDbt, SourceDelighted, SourceDixa, SourceDockerhub, SourceDremio, SourceDynamodb, SourceE2eTestCloud, SourceEmailoctopus, SourceExchangeRates, SourceFacebookMarketing, SourceFaker, SourceFauna, SourceFile, SourceFirebolt, SourceFleetio, SourceFreshcaller, SourceFreshdesk, SourceFreshsales, SourceGainsightPx, SourceGcs, SourceGetlago, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGnews, SourceGoldcast, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, SourceGoogleDirectory, SourceGoogleDrive, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleWebfonts, SourceGreenhouse, SourceGridly, SourceHardcodedRecords, SourceHarvest, SourceHibob, SourceHighLevel, SourceHubplanner, SourceHubspot, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceIp2whois, SourceIterable, SourceJira, SourceK6Cloud, SourceKlarna, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLeadfeeder, SourceLemlist, SourceLeverHiring, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLokalise, SourceLooker, SourceLuma, SourceMailchimp, SourceMailgun, SourceMailjetSms, SourceMarketo, SourceMetabase, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMixpanel, SourceMonday, SourceMongodbV2, SourceMssql, SourceMyHours, SourceMysql, SourceNetsuite, SourceNorthpassLms, SourceNotion, SourceNytimes, SourceOkta, SourceOmnisend, SourceOnesignal, SourceOracle, SourceOrb, SourceOrbit, SourceOutbrainAmplify, SourceOutreach, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePennylane, SourcePersistiq, SourcePexelsAPI, SourcePinterest, SourcePipedrive, SourcePlanhat, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourcePypi, SourceQualaroo, SourceRailz, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceReddit, SourceRedshift, SourceRetently, SourceRkiCovid, SourceRss, SourceS3, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, SourceSavvycal, SourceScryfall, SourceSecoda, SourceSendgrid, SourceSendinblue, SourceSenseforce, SourceSentry, SourceSftp, SourceSftpBulk, SourceShopify, SourceShortio, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartsheets, SourceSnapchatMarketing, SourceSnowflake, SourceSonarCloud, SourceSpacexAPI, SourceSquare, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceTempo, SourceTheGuardianAPI, SourceTiktokMarketing, SourceTrello, SourceTrustpilot, SourceTvmazeSchedule, SourceTwilio, SourceTwilioTaskrouter, 
SourceTwitter, SourceTypeform, SourceUsCensus, SourceVantage, SourceWebflow, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceXkcd, SourceYandexMetrica, SourceYotpo, SourceYoutubeAnalytics, SourceZendeskChat, SourceZendeskSell, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenloop, SourceZohoCrm, SourceZoom] +SourceConfiguration = Union[SourceAha, Source7shifts, SourceAirbyte, SourceAircall, SourceAirtable, SourceAmazonAds, SourceAmazonSellerPartner, SourceAmazonSqs, SourceAmplitude, SourceApifyDataset, SourceAppcues, SourceAppfigures, SourceAppfollow, SourceAsana, SourceAuth0, SourceAwsCloudtrail, SourceAzureBlobStorage, SourceAzureTable, SourceBambooHr, SourceBasecamp, SourceBigquery, SourceBingAds, SourceBitly, SourceBraintree, SourceBraze, SourceBreezyHr, SourceBrevo, SourceBuildkite, SourceBuzzsprout, SourceCalendly, SourceCanny, SourceCart, SourceChameleon, SourceChargebee, SourceChartmogul, SourceCimis, SourceClazar, SourceClickhouse, SourceClickupAPI, SourceClockify, SourceCloseCom, SourceCoda, SourceCoinAPI, SourceCoinmarketcap, SourceConfigcat, SourceConfluence, SourceConvex, SourceCustomerIo, SourceDatadog, SourceDatascope, SourceDbt, SourceDelighted, SourceDixa, SourceDockerhub, SourceDremio, SourceDynamodb, SourceEmailoctopus, SourceExchangeRates, SourceEzofficeinventory, SourceFacebookMarketing, SourceFaker, SourceFauna, SourceFile, SourceFirebolt, SourceFleetio, SourceFreshcaller, SourceFreshdesk, SourceFreshsales, SourceFront, SourceGainsightPx, SourceGcs, SourceGetlago, SourceGithub, SourceGitlab, SourceGlassfrog, SourceGnews, SourceGoldcast, SourceGoogleAds, SourceGoogleAnalyticsDataAPI, SourceGoogleDirectory, SourceGoogleDrive, SourceGooglePagespeedInsights, SourceGoogleSearchConsole, SourceGoogleSheets, SourceGoogleTasks, SourceGoogleWebfonts, SourceGreenhouse, SourceGridly, SourceGuru, SourceHardcodedRecords, SourceHarvest, SourceHeight, SourceHibob, SourceHighLevel, SourceHubplanner, SourceHubspot, SourceInsightly, SourceInstagram, SourceInstatus, SourceIntercom, SourceIp2whois, SourceIterable, SourceJira, SourceJotform, SourceK6Cloud, SourceKlarna, SourceKlaviyo, SourceKyve, SourceLaunchdarkly, SourceLeadfeeder, SourceLemlist, SourceLeverHiring, SourceLinkedinAds, SourceLinkedinPages, SourceLinnworks, SourceLokalise, SourceLooker, SourceLuma, SourceMailchimp, SourceMailgun, SourceMailjetSms, SourceMarketo, SourceMetabase, SourceMicrosoftOnedrive, SourceMicrosoftSharepoint, SourceMicrosoftTeams, SourceMixpanel, SourceMonday, SourceMongodbV2, SourceMssql, SourceMyHours, SourceMysql, SourceNetsuite, SourceNorthpassLms, SourceNotion, SourceNylas, SourceNytimes, SourceOkta, SourceOmnisend, SourceOnesignal, SourceOracle, SourceOrb, SourceOrbit, SourceOutbrainAmplify, SourceOutreach, SourcePaypalTransaction, SourcePaystack, SourcePendo, SourcePennylane, SourcePersistiq, SourcePexelsAPI, SourcePicqer, SourcePinterest, SourcePipedrive, SourcePiwik, SourcePlanhat, SourcePocket, SourcePokeapi, SourcePolygonStockAPI, SourcePostgres, SourcePosthog, SourcePostmarkapp, SourcePrestashop, SourceProductboard, SourceProductive, SourcePypi, SourceQualaroo, SourceRailz, SourceRecharge, SourceRecreation, SourceRecruitee, SourceRecurly, SourceReddit, SourceRedshift, SourceRetently, SourceRkiCovid, SourceRss, SourceS3, SourceSalesforce, SourceSalesloft, SourceSapFieldglass, SourceSavvycal, SourceScryfall, SourceSecoda, SourceSendgrid, SourceSendinblue, SourceSenseforce, SourceSentry, SourceSftp, SourceSftpBulk, SourceShopify, SourceShortcut, 
SourceShortio, SourceSlack, SourceSmaily, SourceSmartengage, SourceSmartsheets, SourceSnapchatMarketing, SourceSnowflake, SourceSonarCloud, SourceSpacexAPI, SourceSquare, SourceStrava, SourceStripe, SourceSurveySparrow, SourceSurveymonkey, SourceSurvicate, SourceTeamwork, SourceTempo, SourceTheGuardianAPI, SourceTiktokMarketing, SourceTrello, SourceTrustpilot, SourceTvmazeSchedule, SourceTwilio, SourceTwilioTaskrouter, SourceTwitter, SourceTypeform, SourceUsCensus, SourceVantage, SourceWebflow, SourceWhenIWork, SourceWhiskyHunter, SourceWikipediaPageviews, SourceWoocommerce, SourceXkcd, SourceYandexMetrica, SourceYotpo, SourceYoutubeAnalytics, SourceZendeskChat, SourceZendeskSunshine, SourceZendeskSupport, SourceZendeskTalk, SourceZenloop, SourceZohoCrm, SourceZoom] diff --git a/src/airbyte_api/sdkconfiguration.py b/src/airbyte_api/sdkconfiguration.py index 2bbc1149..16eda5f6 100644 --- a/src/airbyte_api/sdkconfiguration.py +++ b/src/airbyte_api/sdkconfiguration.py @@ -24,9 +24,9 @@ class SDKConfiguration: server_idx: Optional[int] = 0 language: str = 'python' openapi_doc_version: str = '1.0.0' - sdk_version: str = '0.52.0' - gen_version: str = '2.415.0' - user_agent: str = 'speakeasy-sdk/python 0.52.0 2.415.0 1.0.0 airbyte-api' + sdk_version: str = '0.52.1' + gen_version: str = '2.422.15' + user_agent: str = 'speakeasy-sdk/python 0.52.1 2.422.15 1.0.0 airbyte-api' retry_config: Optional[RetryConfig] = None def __post_init__(self):
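All of the models added in this patch are now members of the SourceConfiguration union above, so any of them can be passed wherever a source configuration is accepted. A rough end-to-end sketch follows, assuming the create-source flow from this SDK's README; the client and request class names (`AirbyteAPI`, `Security`, `SchemeBasicAuth`, `SourceCreateRequest`) come from that flow rather than from this patch, and the credentials and workspace ID are placeholders.

from datetime import datetime, timezone

import airbyte_api
from airbyte_api import models

# Client setup, assuming basic-auth security as in the README examples.
sdk = airbyte_api.AirbyteAPI(
    security=models.Security(
        basic_auth=models.SchemeBasicAuth(username='...', password='...'),
    ),
)

# Create a source backed by one of the newly generated models.
res = sdk.sources.create_source(request=models.SourceCreateRequest(
    name='Teamwork projects',
    workspace_id='00000000-0000-0000-0000-000000000000',  # placeholder
    configuration=models.SourceTeamwork(
        site_name='mycompany',
        start_date=datetime(2024, 1, 1, tzinfo=timezone.utc),
        username='user@example.com',
    ),
))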