<html><head><meta name="color-scheme" content="light dark"></head><body><pre style="word-wrap: break-word; white-space: pre-wrap;">"use strict";(self.webpackChunkdocs_website=self.webpackChunkdocs_website||[]).push([[80053],{1109:e=&gt;{e.exports=JSON.parse('{"pluginId":"default","version":"current","label":"Next","banner":"unreleased","badge":true,"noIndex":false,"className":"docs-version-current","isLast":false,"docsSidebars":{"overviewSidebar":[{"type":"html","value":"&lt;div&gt;Getting Started&lt;/div&gt;","defaultStyle":true},{"label":"What Is DataHub?","type":"category","collapsed":true,"items":[{"type":"link","label":"Quickstart Guide","href":"/docs/next/quickstart","docId":"docs/quickstart"},{"type":"link","label":"Demo","href":"https://demo.datahubproject.io/"},{"type":"link","label":"Concepts","href":"/docs/next/what-is-datahub/datahub-concepts","docId":"docs/what-is-datahub/datahub-concepts"}],"collapsible":true,"href":"/docs/next/features"},{"type":"category","label":"Features","items":[{"type":"link","label":"Ingestion","href":"/docs/next/ui-ingestion","docId":"docs/ui-ingestion"},{"type":"link","label":"Search","href":"/docs/next/how/search","docId":"docs/how/search"},{"type":"link","label":"Schema History","href":"/docs/next/schema-history","docId":"docs/schema-history"},{"type":"link","label":"Domains","href":"/docs/next/domains","docId":"docs/domains"},{"type":"link","label":"Data Products","href":"/docs/next/dataproducts","docId":"docs/dataproducts"},{"type":"link","label":"Business Glossary","href":"/docs/next/glossary/business-glossary","docId":"docs/glossary/business-glossary"},{"type":"link","label":"Tags","href":"/docs/next/tags","docId":"docs/tags"},{"type":"link","label":"Custom Ownership Types","href":"/docs/next/ownership/ownership-types","docId":"docs/ownership/ownership-types"},{"type":"link","label":"Access Policies","href":"/docs/next/authorization/access-policies-guide","docId":"docs/authorization/access-policies-guide"},{"type":"link","label":"Dataset Usage &amp; Query History","href":"/docs/next/features/dataset-usage-and-query-history","docId":"docs/features/dataset-usage-and-query-history"},{"type":"link","label":"Posts","href":"/docs/next/posts","docId":"docs/posts"},{"type":"link","label":"Sync Status","href":"/docs/next/sync-status","docId":"docs/sync-status"},{"type":"link","label":"Lineage","href":"/docs/next/generated/lineage/lineage-feature-guide","docId":"docs/generated/lineage/lineage-feature-guide"},{"type":"link","label":"Metadata Tests","href":"/docs/next/tests/metadata-tests","className":"saasOnly","docId":"docs/tests/metadata-tests"},{"type":"link","label":"Lineage Impact Analysis","href":"/docs/next/act-on-metadata/impact-analysis","docId":"docs/act-on-metadata/impact-analysis"},{"label":"Observability","type":"category","items":[{"type":"link","label":"Freshness Assertions","href":"/docs/next/managed-datahub/observe/freshness-assertions","className":"saasOnly","docId":"docs/managed-datahub/observe/freshness-assertions"},{"type":"link","label":"Volume Assertions","href":"/docs/next/managed-datahub/observe/volume-assertions","className":"saasOnly","docId":"docs/managed-datahub/observe/volume-assertions"},{"type":"link","label":"Custom SQL Assertions","href":"/docs/next/managed-datahub/observe/custom-sql-assertions","className":"saasOnly","docId":"docs/managed-datahub/observe/custom-sql-assertions"},{"type":"link","label":"Column Assertions","href":"/docs/next/managed-datahub/observe/column-assertions","className":"saasOnly","docId":"docs/managed-datahub/observe/column-assertions"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Guides","items":[{"type":"link","label":"Managing Lineage via UI","href":"/docs/next/features/feature-guides/ui-lineage","docId":"docs/features/feature-guides/ui-lineage"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true,"href":"/docs/next/category/features"},{"label":"Managed DataHub","type":"category","collapsed":true,"items":[{"type":"link","label":"Getting Started with Acryl DataHub","href":"/docs/next/managed-datahub/welcome-acryl","docId":"docs/managed-datahub/welcome-acryl"},{"type":"link","label":"Configure Slack For Notifications","href":"/docs/next/managed-datahub/saas-slack-setup","className":"saasOnly","docId":"docs/managed-datahub/saas-slack-setup"},{"type":"link","label":"Approval Workflows","href":"/docs/next/managed-datahub/approval-workflows","className":"saasOnly","docId":"docs/managed-datahub/approval-workflows"},{"type":"category","label":"Metadata Ingestion With Acryl","items":[{"type":"link","label":"Ingestion","href":"/docs/next/managed-datahub/metadata-ingestion-with-acryl/ingestion","docId":"docs/managed-datahub/metadata-ingestion-with-acryl/ingestion"}],"collapsed":true,"collapsible":true},{"type":"category","label":"DataHub API","items":[{"type":"link","label":"Entity Events API","href":"/docs/next/managed-datahub/datahub-api/entity-events-api","className":"saasOnly","docId":"docs/managed-datahub/datahub-api/entity-events-api"},{"type":"category","label":"GraphQL API","items":[{"type":"link","label":"Getting Started","href":"/docs/next/managed-datahub/datahub-api/graphql-api/getting-started","docId":"docs/managed-datahub/datahub-api/graphql-api/getting-started"},{"type":"link","label":"Incidents API (Beta)","href":"/docs/next/managed-datahub/datahub-api/graphql-api/incidents-api-beta","className":"saasOnly","docId":"docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"Integrations","items":[{"type":"link","label":"AWS PrivateLink","href":"/docs/next/managed-datahub/integrations/aws-privatelink","className":"saasOnly","docId":"docs/managed-datahub/integrations/aws-privatelink"},{"type":"link","label":"OIDC SSO Integration","href":"/docs/next/managed-datahub/integrations/oidc-sso-integration","className":"saasOnly","docId":"docs/managed-datahub/integrations/oidc-sso-integration"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Operator Guide","items":[{"type":"link","label":"Setting up Remote Ingestion Executor on AWS","href":"/docs/next/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws","className":"saasOnly","docId":"docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws"},{"type":"link","label":"Setting up Events API on AWS EventBridge","href":"/docs/next/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge","className":"saasOnly","docId":"docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge"}],"collapsed":true,"collapsible":true},{"type":"link","label":"Acryl DataHub Chrome Extension","href":"/docs/next/managed-datahub/chrome-extension","docId":"docs/managed-datahub/chrome-extension"},{"type":"link","label":"Subscriptions &amp; Notifications","href":"/docs/next/managed-datahub/subscription-and-notification","className":"saasOnly","docId":"docs/managed-datahub/subscription-and-notification"},{"type":"category","label":"Managed DataHub Release History","items":[{"type":"link","label":"v0.2.13","href":"/docs/next/managed-datahub/release-notes/v_0_2_13","docId":"docs/managed-datahub/release-notes/v_0_2_13"},{"type":"link","label":"v0.2.12","href":"/docs/next/managed-datahub/release-notes/v_0_2_12","docId":"docs/managed-datahub/release-notes/v_0_2_12"},{"type":"link","label":"v0.2.11","href":"/docs/next/managed-datahub/release-notes/v_0_2_11","docId":"docs/managed-datahub/release-notes/v_0_2_11"},{"type":"link","label":"v0.2.10","href":"/docs/next/managed-datahub/release-notes/v_0_2_10","docId":"docs/managed-datahub/release-notes/v_0_2_10"},{"type":"link","label":"v0.2.9","href":"/docs/next/managed-datahub/release-notes/v_0_2_9","docId":"docs/managed-datahub/release-notes/v_0_2_9"},{"type":"link","label":"v0.2.8","href":"/docs/next/managed-datahub/release-notes/v_0_2_8","docId":"docs/managed-datahub/release-notes/v_0_2_8"},{"type":"link","label":"v0.2.7","href":"/docs/next/managed-datahub/release-notes/v_0_2_7","docId":"docs/managed-datahub/release-notes/v_0_2_7"},{"type":"link","label":"v0.2.6","href":"/docs/next/managed-datahub/release-notes/v_0_2_6","docId":"docs/managed-datahub/release-notes/v_0_2_6"},{"type":"link","label":"v0.2.5","href":"/docs/next/managed-datahub/release-notes/v_0_2_5","docId":"docs/managed-datahub/release-notes/v_0_2_5"},{"type":"link","label":"v0.2.4","href":"/docs/next/managed-datahub/release-notes/v_0_2_4","docId":"docs/managed-datahub/release-notes/v_0_2_4"},{"type":"link","label":"v0.2.3","href":"/docs/next/managed-datahub/release-notes/v_0_2_3","docId":"docs/managed-datahub/release-notes/v_0_2_3"},{"type":"link","label":"v0.2.2","href":"/docs/next/managed-datahub/release-notes/v_0_2_2","docId":"docs/managed-datahub/release-notes/v_0_2_2"},{"type":"link","label":"v0.2.1","href":"/docs/next/managed-datahub/release-notes/v_0_2_1","docId":"docs/managed-datahub/release-notes/v_0_2_1"},{"type":"link","label":"v0.2.0","href":"/docs/next/managed-datahub/release-notes/v_0_2_0","docId":"docs/managed-datahub/release-notes/v_0_2_0"},{"type":"link","label":"v0.1.73","href":"/docs/next/managed-datahub/release-notes/v_0_1_73","docId":"docs/managed-datahub/release-notes/v_0_1_73"},{"type":"link","label":"v0.1.72","href":"/docs/next/managed-datahub/release-notes/v_0_1_72","docId":"docs/managed-datahub/release-notes/v_0_1_72"},{"type":"link","label":"v0.1.70","href":"/docs/next/managed-datahub/release-notes/v_0_1_70","docId":"docs/managed-datahub/release-notes/v_0_1_70"},{"type":"link","label":"v0.1.69","href":"/docs/next/managed-datahub/release-notes/v_0_1_69","docId":"docs/managed-datahub/release-notes/v_0_1_69"}],"collapsed":true,"collapsible":true}],"collapsible":true,"href":"/docs/next/managed-datahub/managed-datahub-overview"},{"type":"html","value":"&lt;div&gt;Integrations&lt;/div&gt;","defaultStyle":true},{"type":"category","label":"Overview","items":[{"type":"link","label":"Recipe","href":"/docs/next/metadata-ingestion/recipe_overview","docId":"metadata-ingestion/recipe_overview"},{"type":"category","label":"Sinks","items":[{"type":"link","label":"Console","href":"/docs/next/metadata-ingestion/sink_docs/console","docId":"metadata-ingestion/sink_docs/console"},{"type":"link","label":"DataHub","href":"/docs/next/metadata-ingestion/sink_docs/datahub","docId":"metadata-ingestion/sink_docs/datahub"},{"type":"link","label":"File","href":"/docs/next/metadata-ingestion/sink_docs/file","docId":"metadata-ingestion/sink_docs/file"}],"collapsed":true,"collapsible":true,"href":"/docs/next/metadata-ingestion/sink_overview"},{"type":"category","label":"Transformers","items":[{"type":"link","label":"Dataset","href":"/docs/next/metadata-ingestion/docs/transformer/dataset_transformer","docId":"metadata-ingestion/docs/transformer/dataset_transformer"}],"collapsed":true,"collapsible":true,"href":"/docs/next/metadata-ingestion/docs/transformer/intro"}],"collapsed":true,"collapsible":true,"href":"/docs/next/metadata-ingestion"},{"type":"category","label":"Quickstart Guides","items":[{"type":"link","label":"CLI Ingestion","href":"/docs/next/metadata-ingestion/cli-ingestion","docId":"metadata-ingestion/cli-ingestion"},{"type":"category","label":"BigQuery","items":[{"type":"link","label":"Overview","href":"/docs/next/quick-ingestion-guides/bigquery/overview","docId":"docs/quick-ingestion-guides/bigquery/overview"},{"type":"link","label":"Setup","href":"/docs/next/quick-ingestion-guides/bigquery/setup","docId":"docs/quick-ingestion-guides/bigquery/setup"},{"type":"link","label":"Configuration","href":"/docs/next/quick-ingestion-guides/bigquery/configuration","docId":"docs/quick-ingestion-guides/bigquery/configuration"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Redshift","items":[{"type":"link","label":"Overview","href":"/docs/next/quick-ingestion-guides/redshift/overview","docId":"docs/quick-ingestion-guides/redshift/overview"},{"type":"link","label":"Setup","href":"/docs/next/quick-ingestion-guides/redshift/setup","docId":"docs/quick-ingestion-guides/redshift/setup"},{"type":"link","label":"Configuration","href":"/docs/next/quick-ingestion-guides/redshift/configuration","docId":"docs/quick-ingestion-guides/redshift/configuration"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Snowflake","items":[{"type":"link","label":"Overview","href":"/docs/next/quick-ingestion-guides/snowflake/overview","docId":"docs/quick-ingestion-guides/snowflake/overview"},{"type":"link","label":"Setup","href":"/docs/next/quick-ingestion-guides/snowflake/setup","docId":"docs/quick-ingestion-guides/snowflake/setup"},{"type":"link","label":"Configuration","href":"/docs/next/quick-ingestion-guides/snowflake/configuration","docId":"docs/quick-ingestion-guides/snowflake/configuration"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Tableau","items":[{"type":"link","label":"Overview","href":"/docs/next/quick-ingestion-guides/tableau/overview","docId":"docs/quick-ingestion-guides/tableau/overview"},{"type":"link","label":"Setup","href":"/docs/next/quick-ingestion-guides/tableau/setup","docId":"docs/quick-ingestion-guides/tableau/setup"},{"type":"link","label":"Configuration","href":"/docs/next/quick-ingestion-guides/tableau/configuration","docId":"docs/quick-ingestion-guides/tableau/configuration"}],"collapsed":true,"collapsible":true},{"type":"category","label":"PowerBI","items":[{"type":"link","label":"Overview","href":"/docs/next/quick-ingestion-guides/powerbi/overview","docId":"docs/quick-ingestion-guides/powerbi/overview"},{"type":"link","label":"Setup","href":"/docs/next/quick-ingestion-guides/powerbi/setup","docId":"docs/quick-ingestion-guides/powerbi/setup"},{"type":"link","label":"Configuration","href":"/docs/next/quick-ingestion-guides/powerbi/configuration","docId":"docs/quick-ingestion-guides/powerbi/configuration"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Looker","items":[{"type":"link","label":"Overview","href":"/docs/next/quick-ingestion-guides/looker/overview","docId":"docs/quick-ingestion-guides/looker/overview"},{"type":"link","label":"Setup","href":"/docs/next/quick-ingestion-guides/looker/setup","docId":"docs/quick-ingestion-guides/looker/setup"},{"type":"link","label":"Configuration","href":"/docs/next/quick-ingestion-guides/looker/configuration","docId":"docs/quick-ingestion-guides/looker/configuration"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"Sources","items":[{"type":"link","label":"Airflow","href":"/docs/next/lineage/airflow","docId":"docs/lineage/airflow"},{"type":"link","label":"Spark","href":"/docs/next/metadata-integration/java/spark-lineage","docId":"metadata-integration/java/spark-lineage/README"},{"type":"link","label":"Great Expectations","href":"/docs/next/metadata-ingestion/integration_docs/great-expectations","docId":"metadata-ingestion/integration_docs/great-expectations"},{"type":"link","label":"Protobuf Schemas","href":"/docs/next/metadata-integration/java/datahub-protobuf","docId":"metadata-integration/java/datahub-protobuf/README"},{"type":"link","label":"Athena","href":"/docs/next/generated/ingestion/sources/athena","docId":"docs/generated/ingestion/sources/athena"},{"type":"link","label":"Azure AD","href":"/docs/next/generated/ingestion/sources/azure-ad","docId":"docs/generated/ingestion/sources/azure-ad"},{"type":"link","label":"BigQuery","href":"/docs/next/generated/ingestion/sources/bigquery","docId":"docs/generated/ingestion/sources/bigquery"},{"type":"link","label":"Business Glossary","href":"/docs/next/generated/ingestion/sources/business-glossary","docId":"docs/generated/ingestion/sources/business-glossary"},{"type":"link","label":"ClickHouse","href":"/docs/next/generated/ingestion/sources/clickhouse","docId":"docs/generated/ingestion/sources/clickhouse"},{"type":"link","label":"CSV","href":"/docs/next/generated/ingestion/sources/csv","docId":"docs/generated/ingestion/sources/csv"},{"type":"link","label":"Databricks","href":"/docs/next/generated/ingestion/sources/databricks","docId":"docs/generated/ingestion/sources/databricks"},{"type":"link","label":"DataHub","href":"/docs/next/generated/ingestion/sources/datahub","docId":"docs/generated/ingestion/sources/datahub"},{"type":"link","label":"dbt","href":"/docs/next/generated/ingestion/sources/dbt","docId":"docs/generated/ingestion/sources/dbt"},{"type":"link","label":"Delta Lake","href":"/docs/next/generated/ingestion/sources/delta-lake","docId":"docs/generated/ingestion/sources/delta-lake"},{"type":"link","label":"Demo Data","href":"/docs/next/generated/ingestion/sources/demo-data","docId":"docs/generated/ingestion/sources/demo-data"},{"type":"link","label":"Druid","href":"/docs/next/generated/ingestion/sources/druid","docId":"docs/generated/ingestion/sources/druid"},{"type":"link","label":"DynamoDB","href":"/docs/next/generated/ingestion/sources/dynamodb","docId":"docs/generated/ingestion/sources/dynamodb"},{"type":"link","label":"Elasticsearch","href":"/docs/next/generated/ingestion/sources/elasticsearch","docId":"docs/generated/ingestion/sources/elasticsearch"},{"type":"link","label":"Feast","href":"/docs/next/generated/ingestion/sources/feast","docId":"docs/generated/ingestion/sources/feast"},{"type":"link","label":"File","href":"/docs/next/generated/ingestion/sources/file","docId":"docs/generated/ingestion/sources/file"},{"type":"link","label":"File Based Lineage","href":"/docs/next/generated/ingestion/sources/file-based-lineage","docId":"docs/generated/ingestion/sources/file-based-lineage"},{"type":"link","label":"Fivetran","href":"/docs/next/generated/ingestion/sources/fivetran","docId":"docs/generated/ingestion/sources/fivetran"},{"type":"link","label":"Glue","href":"/docs/next/generated/ingestion/sources/glue","docId":"docs/generated/ingestion/sources/glue"},{"type":"link","label":"Google Cloud Storage","href":"/docs/next/generated/ingestion/sources/gcs","docId":"docs/generated/ingestion/sources/gcs"},{"type":"link","label":"Hive","href":"/docs/next/generated/ingestion/sources/hive","docId":"docs/generated/ingestion/sources/hive"},{"type":"link","label":"Iceberg","href":"/docs/next/generated/ingestion/sources/iceberg","docId":"docs/generated/ingestion/sources/iceberg"},{"type":"link","label":"JSON Schemas","href":"/docs/next/generated/ingestion/sources/json-schema","docId":"docs/generated/ingestion/sources/json-schema"},{"type":"link","label":"Kafka","href":"/docs/next/generated/ingestion/sources/kafka","docId":"docs/generated/ingestion/sources/kafka"},{"type":"link","label":"Kafka Connect","href":"/docs/next/generated/ingestion/sources/kafka-connect","docId":"docs/generated/ingestion/sources/kafka-connect"},{"type":"link","label":"LDAP","href":"/docs/next/generated/ingestion/sources/ldap","docId":"docs/generated/ingestion/sources/ldap"},{"type":"link","label":"Looker","href":"/docs/next/generated/ingestion/sources/looker","docId":"docs/generated/ingestion/sources/looker"},{"type":"link","label":"MariaDB","href":"/docs/next/generated/ingestion/sources/mariadb","docId":"docs/generated/ingestion/sources/mariadb"},{"type":"link","label":"Metabase","href":"/docs/next/generated/ingestion/sources/metabase","docId":"docs/generated/ingestion/sources/metabase"},{"type":"link","label":"Microsoft SQL Server","href":"/docs/next/generated/ingestion/sources/mssql","docId":"docs/generated/ingestion/sources/mssql"},{"type":"link","label":"MLflow","href":"/docs/next/generated/ingestion/sources/mlflow","docId":"docs/generated/ingestion/sources/mlflow"},{"type":"link","label":"Mode","href":"/docs/next/generated/ingestion/sources/mode","docId":"docs/generated/ingestion/sources/mode"},{"type":"link","label":"MongoDB","href":"/docs/next/generated/ingestion/sources/mongodb","docId":"docs/generated/ingestion/sources/mongodb"},{"type":"link","label":"MySQL","href":"/docs/next/generated/ingestion/sources/mysql","docId":"docs/generated/ingestion/sources/mysql"},{"type":"link","label":"NiFi","href":"/docs/next/generated/ingestion/sources/nifi","docId":"docs/generated/ingestion/sources/nifi"},{"type":"link","label":"Okta","href":"/docs/next/generated/ingestion/sources/okta","docId":"docs/generated/ingestion/sources/okta"},{"type":"link","label":"OpenAPI","href":"/docs/next/generated/ingestion/sources/openapi","docId":"docs/generated/ingestion/sources/openapi"},{"type":"link","label":"Oracle","href":"/docs/next/generated/ingestion/sources/oracle","docId":"docs/generated/ingestion/sources/oracle"},{"type":"link","label":"Postgres","href":"/docs/next/generated/ingestion/sources/postgres","docId":"docs/generated/ingestion/sources/postgres"},{"type":"link","label":"PowerBI","href":"/docs/next/generated/ingestion/sources/powerbi","docId":"docs/generated/ingestion/sources/powerbi"},{"type":"link","label":"Presto","href":"/docs/next/generated/ingestion/sources/presto","docId":"docs/generated/ingestion/sources/presto"},{"type":"link","label":"Presto on Hive","href":"/docs/next/generated/ingestion/sources/presto-on-hive","docId":"docs/generated/ingestion/sources/presto-on-hive"},{"type":"link","label":"Pulsar","href":"/docs/next/generated/ingestion/sources/pulsar","docId":"docs/generated/ingestion/sources/pulsar"},{"type":"link","label":"Redash","href":"/docs/next/generated/ingestion/sources/redash","docId":"docs/generated/ingestion/sources/redash"},{"type":"link","label":"Redshift","href":"/docs/next/generated/ingestion/sources/redshift","docId":"docs/generated/ingestion/sources/redshift"},{"type":"link","label":"S3 Data Lake","href":"/docs/next/generated/ingestion/sources/s3","docId":"docs/generated/ingestion/sources/s3"},{"type":"link","label":"SageMaker","href":"/docs/next/generated/ingestion/sources/sagemaker","docId":"docs/generated/ingestion/sources/sagemaker"},{"type":"link","label":"Salesforce","href":"/docs/next/generated/ingestion/sources/salesforce","docId":"docs/generated/ingestion/sources/salesforce"},{"type":"link","label":"SAP HANA","href":"/docs/next/generated/ingestion/sources/hana","docId":"docs/generated/ingestion/sources/hana"},{"type":"link","label":"Snowflake","href":"/docs/next/generated/ingestion/sources/snowflake","docId":"docs/generated/ingestion/sources/snowflake"},{"type":"link","label":"SQL Queries","href":"/docs/next/generated/ingestion/sources/sql-queries","docId":"docs/generated/ingestion/sources/sql-queries"},{"type":"link","label":"SQLAlchemy","href":"/docs/next/generated/ingestion/sources/sqlalchemy","docId":"docs/generated/ingestion/sources/sqlalchemy"},{"type":"link","label":"Superset","href":"/docs/next/generated/ingestion/sources/superset","docId":"docs/generated/ingestion/sources/superset"},{"type":"link","label":"Tableau","href":"/docs/next/generated/ingestion/sources/tableau","docId":"docs/generated/ingestion/sources/tableau"},{"type":"link","label":"Teradata","href":"/docs/next/generated/ingestion/sources/teradata","docId":"docs/generated/ingestion/sources/teradata"},{"type":"link","label":"Trino","href":"/docs/next/generated/ingestion/sources/trino","docId":"docs/generated/ingestion/sources/trino"},{"type":"link","label":"Vertica","href":"/docs/next/generated/ingestion/sources/vertica","docId":"docs/generated/ingestion/sources/vertica"}],"collapsed":true,"collapsible":true,"href":"/docs/next/metadata-ingestion/source_overview"},{"type":"category","label":"Advanced Guides","items":[{"type":"category","label":"Scheduling Ingestion","items":[{"type":"link","label":"Introduction to Scheduling Metadata Ingestion","href":"/docs/next/metadata-ingestion/schedule_docs/intro","docId":"metadata-ingestion/schedule_docs/intro"},{"type":"link","label":"Using Cron","href":"/docs/next/metadata-ingestion/schedule_docs/cron","docId":"metadata-ingestion/schedule_docs/cron"},{"type":"link","label":"Using Airflow","href":"/docs/next/metadata-ingestion/schedule_docs/airflow","docId":"metadata-ingestion/schedule_docs/airflow"},{"type":"link","label":"Using Kubernetes","href":"/docs/next/metadata-ingestion/schedule_docs/kubernetes","docId":"metadata-ingestion/schedule_docs/kubernetes"}],"collapsed":true,"collapsible":true},{"type":"link","label":"Working With Platform Instances","href":"/docs/next/platform-instances","docId":"docs/platform-instances"},{"type":"link","label":"Stateful Ingestion","href":"/docs/next/metadata-ingestion/docs/dev_guides/stateful","docId":"metadata-ingestion/docs/dev_guides/stateful"},{"type":"link","label":"Classification","href":"/docs/next/metadata-ingestion/docs/dev_guides/classification","docId":"metadata-ingestion/docs/dev_guides/classification"},{"type":"link","label":"Adding Stateful Ingestion to a Source","href":"/docs/next/metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source","docId":"metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source"},{"type":"link","label":"SQL Profiling","href":"/docs/next/metadata-ingestion/docs/dev_guides/sql_profiles","docId":"metadata-ingestion/docs/dev_guides/sql_profiles"},{"type":"link","label":"Profiling ingestions","href":"/docs/next/metadata-ingestion/docs/dev_guides/profiling_ingestions","docId":"metadata-ingestion/docs/dev_guides/profiling_ingestions"}],"collapsed":true,"collapsible":true},{"type":"html","value":"&lt;div&gt;Deployment&lt;/div&gt;","defaultStyle":true},{"type":"category","label":"Deployment Guides","items":[{"type":"link","label":"Deploying to AWS","href":"/docs/next/deploy/aws","docId":"docs/deploy/aws"},{"type":"link","label":"Deploying to GCP","href":"/docs/next/deploy/gcp","docId":"docs/deploy/gcp"},{"type":"link","label":"Deploying to Azure","href":"/docs/next/deploy/azure","docId":"docs/deploy/azure"},{"type":"link","label":"Deploying with Docker","href":"/docs/next/docker","docId":"docker/README"},{"type":"link","label":"Deploying with Kubernetes","href":"/docs/next/deploy/kubernetes","docId":"docs/deploy/kubernetes"}],"collapsed":true,"collapsible":true,"href":"/docs/next/category/deployment-guides"},{"type":"category","label":"Advanced Guides","items":[{"type":"link","label":"Integrating with Confluent Cloud","href":"/docs/next/deploy/confluent-cloud","docId":"docs/deploy/confluent-cloud"},{"type":"link","label":"Deployment Environment Variables","href":"/docs/next/deploy/environment-vars","docId":"docs/deploy/environment-vars"},{"type":"link","label":"How to Extract Logs from DataHub Containers","href":"/docs/next/how/extract-container-logs","docId":"docs/how/extract-container-logs"}],"collapsed":true,"collapsible":true},{"type":"html","value":"&lt;div&gt;Admin&lt;/div&gt;","defaultStyle":true},{"type":"category","label":"Authentication","items":[{"type":"link","label":"Overview","href":"/docs/next/authentication","docId":"docs/authentication/README"},{"type":"link","label":"Concepts &amp; Key Components","href":"/docs/next/authentication/concepts","docId":"docs/authentication/concepts"},{"type":"link","label":"Changing the default user credentials","href":"/docs/next/authentication/changing-default-credentials","docId":"docs/authentication/changing-default-credentials"},{"type":"link","label":"Onboarding Users to DataHub","href":"/docs/next/authentication/guides/add-users","docId":"docs/authentication/guides/add-users"},{"type":"category","label":"Frontend Authentication","items":[{"type":"link","label":"JaaS Authentication","href":"/docs/next/authentication/guides/jaas","docId":"docs/authentication/guides/jaas"},{"type":"link","label":"OIDC Authentication","href":"/docs/next/authentication/guides/sso/configure-oidc-react","docId":"docs/authentication/guides/sso/configure-oidc-react"},{"type":"link","label":"OIDC Proxy Configuration","href":"/docs/next/authentication/guides/sso/configure-oidc-behind-proxy","docId":"docs/authentication/guides/sso/configure-oidc-behind-proxy"}],"collapsed":true,"collapsible":true},{"type":"link","label":"Metadata Service Authentication","href":"/docs/next/authentication/introducing-metadata-service-authentication","docId":"docs/authentication/introducing-metadata-service-authentication"},{"type":"link","label":"Personal Access Tokens","href":"/docs/next/authentication/personal-access-tokens","docId":"docs/authentication/personal-access-tokens"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Authorization","items":[{"type":"link","label":"Overview","href":"/docs/next/authorization","docId":"docs/authorization/README"},{"type":"link","label":"Roles","href":"/docs/next/authorization/roles","docId":"docs/authorization/roles"},{"type":"link","label":"Policies Guide","href":"/docs/next/authorization/policies","docId":"docs/authorization/policies"},{"type":"link","label":"Authorization using Groups","href":"/docs/next/authorization/groups","docId":"docs/authorization/groups"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Advanced Guides","items":[{"type":"link","label":"Removing Metadata from DataHub","href":"/docs/next/how/delete-metadata","docId":"docs/how/delete-metadata"},{"type":"link","label":"Configuring Authorization with Apache Ranger","href":"/docs/next/how/configuring-authorization-with-apache-ranger","docId":"docs/how/configuring-authorization-with-apache-ranger"},{"type":"link","label":"Taking backup of DataHub","href":"/docs/next/how/backup-datahub","docId":"docs/how/backup-datahub"},{"type":"link","label":"Restoring Search and Graph Indices from Local Database","href":"/docs/next/how/restore-indices","docId":"docs/how/restore-indices"},{"type":"link","label":"Configuring Database Retention","href":"/docs/next/advanced/db-retention","docId":"docs/advanced/db-retention"},{"type":"link","label":"Monitoring DataHub","href":"/docs/next/advanced/monitoring","docId":"docs/advanced/monitoring"},{"type":"link","label":"Telemetry","href":"/docs/next/deploy/telemetry","docId":"docs/deploy/telemetry"},{"type":"link","label":"Configuring Kafka","href":"/docs/next/how/kafka-config","docId":"docs/how/kafka-config"},{"type":"link","label":"No Code Upgrade (In-Place Migration Guide)","href":"/docs/next/advanced/no-code-upgrade","docId":"docs/advanced/no-code-upgrade"},{"type":"link","label":"Debugging by Jattach","href":"/docs/next/how/jattach-guide","docId":"docs/how/jattach-guide"}],"collapsed":true,"collapsible":true},{"type":"html","value":"&lt;div&gt;Developers&lt;/div&gt;","defaultStyle":true},{"type":"category","label":"Architecture","items":[{"type":"link","label":"Overview","href":"/docs/next/architecture/architecture","docId":"docs/architecture/architecture"},{"type":"link","label":"Components","href":"/docs/next/components","docId":"docs/components"},{"type":"link","label":"Ingestion Framework","href":"/docs/next/architecture/metadata-ingestion","docId":"docs/architecture/metadata-ingestion"},{"type":"link","label":"Serving Tier","href":"/docs/next/architecture/metadata-serving","docId":"docs/architecture/metadata-serving"},{"type":"link","label":"Docker Container Architecture","href":"/docs/next/architecture/docker-containers","docId":"docs/architecture/docker-containers"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Metadata Model","items":[{"type":"link","label":"The Metadata Model","href":"/docs/next/metadata-modeling/metadata-model","docId":"docs/modeling/metadata-model"},{"type":"link","label":"Extending the Metadata Model","href":"/docs/next/metadata-modeling/extending-the-metadata-model","docId":"docs/modeling/extending-the-metadata-model"},{"type":"link","label":"Metadata Events","href":"/docs/next/what/mxe","docId":"docs/what/mxe"},{"type":"category","label":"Entities","items":[{"type":"link","label":"Data Platform","href":"/docs/next/generated/metamodel/entities/dataplatform","docId":"docs/generated/metamodel/entities/dataPlatform"},{"type":"link","label":"Role","href":"/docs/next/generated/metamodel/entities/role","docId":"docs/generated/metamodel/entities/role"},{"type":"link","label":"Dataset","href":"/docs/next/generated/metamodel/entities/dataset","docId":"docs/generated/metamodel/entities/dataset"},{"type":"link","label":"DataJob","href":"/docs/next/generated/metamodel/entities/datajob","docId":"docs/generated/metamodel/entities/dataJob"},{"type":"link","label":"DataFlow","href":"/docs/next/generated/metamodel/entities/dataflow","docId":"docs/generated/metamodel/entities/dataFlow"},{"type":"link","label":"DataProcess","href":"/docs/next/generated/metamodel/entities/dataprocess","docId":"docs/generated/metamodel/entities/dataProcess"},{"type":"link","label":"DataProcessInstance","href":"/docs/next/generated/metamodel/entities/dataprocessinstance","docId":"docs/generated/metamodel/entities/dataProcessInstance"},{"type":"link","label":"Chart","href":"/docs/next/generated/metamodel/entities/chart","docId":"docs/generated/metamodel/entities/chart"},{"type":"link","label":"Dashboard","href":"/docs/next/generated/metamodel/entities/dashboard","docId":"docs/generated/metamodel/entities/dashboard"},{"type":"link","label":"Notebook","href":"/docs/next/generated/metamodel/entities/notebook","docId":"docs/generated/metamodel/entities/notebook"},{"type":"link","label":"Corpuser","href":"/docs/next/generated/metamodel/entities/corpuser","docId":"docs/generated/metamodel/entities/corpuser"},{"type":"link","label":"CorpGroup","href":"/docs/next/generated/metamodel/entities/corpgroup","docId":"docs/generated/metamodel/entities/corpGroup"},{"type":"link","label":"Domain","href":"/docs/next/generated/metamodel/entities/domain","docId":"docs/generated/metamodel/entities/domain"},{"type":"link","label":"Container","href":"/docs/next/generated/metamodel/entities/container","docId":"docs/generated/metamodel/entities/container"},{"type":"link","label":"Tag","href":"/docs/next/generated/metamodel/entities/tag","docId":"docs/generated/metamodel/entities/tag"},{"type":"link","label":"GlossaryTerm","href":"/docs/next/generated/metamodel/entities/glossaryterm","docId":"docs/generated/metamodel/entities/glossaryTerm"},{"type":"link","label":"GlossaryNode","href":"/docs/next/generated/metamodel/entities/glossarynode","docId":"docs/generated/metamodel/entities/glossaryNode"},{"type":"link","label":"Assertion","href":"/docs/next/generated/metamodel/entities/assertion","docId":"docs/generated/metamodel/entities/assertion"},{"type":"link","label":"MlModel","href":"/docs/next/generated/metamodel/entities/mlmodel","docId":"docs/generated/metamodel/entities/mlModel"},{"type":"link","label":"MlModelGroup","href":"/docs/next/generated/metamodel/entities/mlmodelgroup","docId":"docs/generated/metamodel/entities/mlModelGroup"},{"type":"link","label":"MlModelDeployment","href":"/docs/next/generated/metamodel/entities/mlmodeldeployment","docId":"docs/generated/metamodel/entities/mlModelDeployment"},{"type":"link","label":"MlFeatureTable","href":"/docs/next/generated/metamodel/entities/mlfeaturetable","docId":"docs/generated/metamodel/entities/mlFeatureTable"},{"type":"link","label":"MlFeature","href":"/docs/next/generated/metamodel/entities/mlfeature","docId":"docs/generated/metamodel/entities/mlFeature"},{"type":"link","label":"MlPrimaryKey","href":"/docs/next/generated/metamodel/entities/mlprimarykey","docId":"docs/generated/metamodel/entities/mlPrimaryKey"},{"type":"link","label":"Test","href":"/docs/next/generated/metamodel/entities/test","docId":"docs/generated/metamodel/entities/test"},{"type":"link","label":"SchemaField","href":"/docs/next/generated/metamodel/entities/schemafield","docId":"docs/generated/metamodel/entities/schemaField"},{"type":"link","label":"DataHubRole","href":"/docs/next/generated/metamodel/entities/datahubrole","docId":"docs/generated/metamodel/entities/dataHubRole"},{"type":"link","label":"Post","href":"/docs/next/generated/metamodel/entities/post","docId":"docs/generated/metamodel/entities/post"},{"type":"link","label":"DataHubView","href":"/docs/next/generated/metamodel/entities/datahubview","docId":"docs/generated/metamodel/entities/dataHubView"},{"type":"link","label":"Query","href":"/docs/next/generated/metamodel/entities/query","docId":"docs/generated/metamodel/entities/query"},{"type":"link","label":"DataProduct","href":"/docs/next/generated/metamodel/entities/dataproduct","docId":"docs/generated/metamodel/entities/dataProduct"},{"type":"link","label":"OwnershipType","href":"/docs/next/generated/metamodel/entities/ownershiptype","docId":"docs/generated/metamodel/entities/ownershipType"},{"type":"link","label":"DataContract","href":"/docs/next/generated/metamodel/entities/datacontract","docId":"docs/generated/metamodel/entities/dataContract"},{"type":"link","label":"DataHubPolicy","href":"/docs/next/generated/metamodel/entities/datahubpolicy","docId":"docs/generated/metamodel/entities/dataHubPolicy"},{"type":"link","label":"DataHubIngestionSource","href":"/docs/next/generated/metamodel/entities/datahubingestionsource","docId":"docs/generated/metamodel/entities/dataHubIngestionSource"},{"type":"link","label":"DataHubSecret","href":"/docs/next/generated/metamodel/entities/datahubsecret","docId":"docs/generated/metamodel/entities/dataHubSecret"},{"type":"link","label":"DataHubExecutionRequest","href":"/docs/next/generated/metamodel/entities/datahubexecutionrequest","docId":"docs/generated/metamodel/entities/dataHubExecutionRequest"},{"type":"link","label":"DataHubRetention","href":"/docs/next/generated/metamodel/entities/datahubretention","docId":"docs/generated/metamodel/entities/dataHubRetention"},{"type":"link","label":"DataPlatformInstance","href":"/docs/next/generated/metamodel/entities/dataplatforminstance","docId":"docs/generated/metamodel/entities/dataPlatformInstance"},{"type":"link","label":"Telemetry","href":"/docs/next/generated/metamodel/entities/telemetry","docId":"docs/generated/metamodel/entities/telemetry"},{"type":"link","label":"DataHubAccessToken","href":"/docs/next/generated/metamodel/entities/datahubaccesstoken","docId":"docs/generated/metamodel/entities/dataHubAccessToken"},{"type":"link","label":"DataHubUpgrade","href":"/docs/next/generated/metamodel/entities/datahubupgrade","docId":"docs/generated/metamodel/entities/dataHubUpgrade"},{"type":"link","label":"InviteToken","href":"/docs/next/generated/metamodel/entities/invitetoken","docId":"docs/generated/metamodel/entities/inviteToken"},{"type":"link","label":"GlobalSettings","href":"/docs/next/generated/metamodel/entities/globalsettings","docId":"docs/generated/metamodel/entities/globalSettings"},{"type":"link","label":"DataHubStepState","href":"/docs/next/generated/metamodel/entities/datahubstepstate","docId":"docs/generated/metamodel/entities/dataHubStepState"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"Developing on DataHub","items":[{"type":"link","label":"Local Development","href":"/docs/next/developers","docId":"docs/developers"},{"type":"link","label":"Using Docker Images During Development","href":"/docs/next/docker/development","docId":"docs/docker/development"},{"type":"link","label":"Developing on Metadata Ingestion","href":"/docs/next/metadata-ingestion/developing","docId":"metadata-ingestion/developing"},{"type":"link","label":"Creating a New GraphQL Endpoint in GMS","href":"/docs/next/api/graphql/graphql-endpoint-development","docId":"docs/api/graphql/graphql-endpoint-development"},{"type":"category","label":"Modules","items":[{"type":"link","label":"datahub-web-react","href":"/docs/next/datahub-web-react","docId":"datahub-web-react/README"},{"type":"link","label":"datahub-frontend","href":"/docs/next/datahub-frontend","docId":"datahub-frontend/README"},{"type":"link","label":"datahub-graphql-core","href":"/docs/next/datahub-graphql-core","docId":"datahub-graphql-core/README"},{"type":"link","label":"metadata-service","href":"/docs/next/metadata-service","docId":"metadata-service/README"},{"type":"link","label":"metadata-jobs:mae-consumer-job","href":"/docs/next/metadata-jobs/mae-consumer-job","docId":"metadata-jobs/mae-consumer-job/README"},{"type":"link","label":"metadata-jobs:mce-consumer-job","href":"/docs/next/metadata-jobs/mce-consumer-job","docId":"metadata-jobs/mce-consumer-job/README"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Troubleshooting","items":[{"type":"link","label":"Quickstart Debugging Guide","href":"/docs/next/troubleshooting/quickstart","docId":"docs/troubleshooting/quickstart"},{"type":"link","label":"Build Debugging Guide","href":"/docs/next/troubleshooting/build","docId":"docs/troubleshooting/build"},{"type":"link","label":"General Debugging Guide","href":"/docs/next/troubleshooting/general","docId":"docs/troubleshooting/general"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"Advanced Guides","items":[{"type":"link","label":"Datahub\'s Reporting Framework for Ingestion Job Telemetry","href":"/docs/next/metadata-ingestion/docs/dev_guides/reporting_telemetry","docId":"metadata-ingestion/docs/dev_guides/reporting_telemetry"},{"type":"link","label":"MetadataChangeProposal &amp; MetadataChangeLog Events","href":"/docs/next/advanced/mcp-mcl","docId":"docs/advanced/mcp-mcl"},{"type":"link","label":"Upgrade Docker Image","href":"/docs/next/docker/datahub-upgrade","docId":"docker/datahub-upgrade/README"},{"type":"link","label":"No Code Metadata","href":"/docs/next/advanced/no-code-modeling","docId":"docs/advanced/no-code-modeling"},{"type":"link","label":"React Analytics","href":"/docs/next/datahub-web-react/src/app/analytics","docId":"datahub-web-react/src/app/analytics/README"},{"type":"link","label":"Migrate Graph Service Implementation to Elasticsearch","href":"/docs/next/how/migrating-graph-service-implementation","docId":"docs/how/migrating-graph-service-implementation"},{"type":"link","label":"SchemaFieldPath Specification (Version 2)","href":"/docs/next/advanced/field-path-spec-v2","docId":"docs/advanced/field-path-spec-v2"},{"type":"link","label":"Adding a Metadata Ingestion Source","href":"/docs/next/metadata-ingestion/adding-source","docId":"metadata-ingestion/adding-source"},{"type":"link","label":"Using a Custom Ingestion Source","href":"/docs/next/how/add-custom-ingestion-source","docId":"docs/how/add-custom-ingestion-source"},{"type":"link","label":"Adding a custom Dataset Data Platform","href":"/docs/next/how/add-custom-data-platform","docId":"docs/how/add-custom-data-platform"},{"type":"link","label":"Browse Paths Upgrade (August 2022)","href":"/docs/next/advanced/browse-paths-upgrade","docId":"docs/advanced/browse-paths-upgrade"},{"type":"link","label":"Generating Browse Paths (V2)","href":"/docs/next/browsev2/browse-paths-v2","docId":"docs/browseV2/browse-paths-v2"},{"type":"link","label":"Plugins Guide","href":"/docs/next/plugins","docId":"docs/plugins"}],"collapsed":true,"collapsible":true},{"type":"html","value":"&lt;div&gt;API &amp; SDKs&lt;/div&gt;","defaultStyle":true},{"type":"link","label":"Overview","href":"/docs/next/api/datahub-apis","docId":"docs/api/datahub-apis"},{"type":"category","label":"API","items":[{"type":"category","label":"GraphQL API","items":[{"type":"link","label":"Overview","href":"/docs/next/api/graphql/overview","docId":"docs/api/graphql/overview"},{"type":"category","label":"Reference","items":[{"type":"link","label":"Queries","href":"/docs/next/graphql/queries","docId":"graphql/queries"},{"type":"link","label":"Mutations","href":"/docs/next/graphql/mutations","docId":"graphql/mutations"},{"type":"link","label":"Objects","href":"/docs/next/graphql/objects","docId":"graphql/objects"},{"type":"link","label":"Inputs","href":"/docs/next/graphql/inputObjects","docId":"graphql/inputObjects"},{"type":"link","label":"Interfaces","href":"/docs/next/graphql/interfaces","docId":"graphql/interfaces"},{"type":"link","label":"Unions","href":"/docs/next/graphql/unions","docId":"graphql/unions"},{"type":"link","label":"Enums","href":"/docs/next/graphql/enums","docId":"graphql/enums"},{"type":"link","label":"Scalars","href":"/docs/next/graphql/scalars","docId":"graphql/scalars"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Guides","items":[{"type":"link","label":"How To Set Up GraphQL","href":"/docs/next/api/graphql/how-to-set-up-graphql","docId":"docs/api/graphql/how-to-set-up-graphql"},{"type":"link","label":"Getting Started With GraphQL","href":"/docs/next/api/graphql/getting-started","docId":"docs/api/graphql/getting-started"},{"type":"link","label":"Access Token Management","href":"/docs/next/api/graphql/token-management","docId":"docs/api/graphql/token-management"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"link","label":"OpenAPI Guide","href":"/docs/next/api/openapi/openapi-usage-guide","docId":"docs/api/openapi/openapi-usage-guide"},{"type":"link","label":"Timeline API","href":"/docs/next/dev-guides/timeline","docId":"docs/dev-guides/timeline"},{"type":"category","label":"Rest.li API","items":[{"type":"link","label":"Rest.li API Guide","href":"/docs/next/api/restli/restli-overview","docId":"docs/api/restli/restli-overview"},{"type":"link","label":"Restore Indices","href":"/docs/next/api/restli/restore-indices","docId":"docs/api/restli/restore-indices"},{"type":"link","label":"Get Index Sizes","href":"/docs/next/api/restli/get-index-sizes","docId":"docs/api/restli/get-index-sizes"},{"type":"link","label":"Truncate Timeseries Aspect","href":"/docs/next/api/restli/truncate-time-series-aspect","docId":"docs/api/restli/truncate-time-series-aspect"},{"type":"link","label":"Get ElasticSearch Task Status Endpoint","href":"/docs/next/api/restli/get-elastic-task-status","docId":"docs/api/restli/get-elastic-task-status"},{"type":"link","label":"Evaluate Tests","href":"/docs/next/api/restli/evaluate-tests","docId":"docs/api/restli/evaluate-tests"},{"type":"link","label":"Aspect Versioning and Rest.li Modeling","href":"/docs/next/advanced/aspect-versioning","docId":"docs/advanced/aspect-versioning"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"category","label":"SDK","items":[{"type":"category","label":"Python SDK","items":[{"type":"link","label":"Python Emitter","href":"/docs/next/metadata-ingestion/as-a-library","docId":"metadata-ingestion/as-a-library"},{"type":"category","label":"Python SDK Reference","items":[{"type":"link","label":"Builder","href":"/docs/next/python-sdk/builder","docId":"python-sdk/builder"},{"type":"link","label":"Client","href":"/docs/next/python-sdk/clients","docId":"python-sdk/clients"},{"type":"link","label":"Models","href":"/docs/next/python-sdk/models","docId":"python-sdk/models"},{"type":"link","label":"URNs","href":"/docs/next/python-sdk/urns","docId":"python-sdk/urns"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true},{"type":"link","label":"Java SDK","href":"/docs/next/metadata-integration/java/as-a-library","docId":"metadata-integration/java/as-a-library"}],"collapsed":true,"collapsible":true},{"type":"category","label":"DataHub CLI","items":[{"type":"link","label":"Lite (Experimental)","href":"/docs/next/datahub_lite","docId":"docs/datahub_lite"}],"collapsed":true,"collapsible":true,"href":"/docs/next/cli"},{"type":"category","label":"Datahub Actions","items":[{"type":"link","label":"Introduction","href":"/docs/next/actions","docId":"docs/actions/README"},{"type":"link","label":"Quickstart","href":"/docs/next/actions/quickstart","docId":"docs/actions/quickstart"},{"type":"link","label":"Concepts","href":"/docs/next/actions/concepts","docId":"docs/actions/concepts"},{"type":"category","label":"Sources","items":[{"type":"link","label":"Kafka Event Source","href":"/docs/next/actions/sources/kafka-event-source","docId":"docs/actions/sources/kafka-event-source"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Events","items":[{"type":"link","label":"Entity Change Event V1","href":"/docs/next/actions/events/entity-change-event","docId":"docs/actions/events/entity-change-event"},{"type":"link","label":"Metadata Change Log Event V1","href":"/docs/next/actions/events/metadata-change-log-event","docId":"docs/actions/events/metadata-change-log-event"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Actions","items":[{"type":"link","label":"Ingestion Executor","href":"/docs/next/actions/actions/executor","docId":"docs/actions/actions/executor"},{"type":"link","label":"Hello World","href":"/docs/next/actions/actions/hello_world","docId":"docs/actions/actions/hello_world"},{"type":"link","label":"Slack","href":"/docs/next/actions/actions/slack","docId":"docs/actions/actions/slack"},{"type":"link","label":"Microsoft Teams","href":"/docs/next/actions/actions/teams","docId":"docs/actions/actions/teams"}],"collapsed":true,"collapsible":true},{"type":"category","label":"Guides","items":[{"type":"link","label":"Developing a Transformer","href":"/docs/next/actions/guides/developing-a-transformer","docId":"docs/actions/guides/developing-a-transformer"},{"type":"link","label":"Developing an Action","href":"/docs/next/actions/guides/developing-an-action","docId":"docs/actions/guides/developing-an-action"}],"collapsed":true,"collapsible":true}],"collapsed":true,"collapsible":true,"href":"/docs/next/act-on-metadata"},{"type":"category","label":"API &amp; SDK Guides","items":[{"type":"link","label":"But First, Semantics: Upsert versus Patch","href":"/docs/next/advanced/patch","docId":"docs/advanced/patch"},{"type":"link","label":"Dataset","href":"/docs/next/api/tutorials/datasets","docId":"docs/api/tutorials/datasets"},{"type":"link","label":"Lineage","href":"/docs/next/api/tutorials/lineage","docId":"docs/api/tutorials/lineage"},{"type":"link","label":"Tags","href":"/docs/next/api/tutorials/tags","docId":"docs/api/tutorials/tags"},{"type":"link","label":"Terms","href":"/docs/next/api/tutorials/terms","docId":"docs/api/tutorials/terms"},{"type":"link","label":"Ownership","href":"/docs/next/api/tutorials/owners","docId":"docs/api/tutorials/owners"},{"type":"link","label":"Domains","href":"/docs/next/api/tutorials/domains","docId":"docs/api/tutorials/domains"},{"type":"link","label":"Deprecation","href":"/docs/next/api/tutorials/deprecation","docId":"docs/api/tutorials/deprecation"},{"type":"link","label":"Description","href":"/docs/next/api/tutorials/descriptions","docId":"docs/api/tutorials/descriptions"},{"type":"link","label":"Custom Properties","href":"/docs/next/api/tutorials/custom-properties","docId":"docs/api/tutorials/custom-properties"},{"type":"link","label":"ML System","href":"/docs/next/api/tutorials/ml","docId":"docs/api/tutorials/ml"}],"collapsed":true,"collapsible":true},{"type":"html","value":"&lt;div&gt;Community&lt;/div&gt;","defaultStyle":true},{"label":"Community","type":"category","collapsed":true,"items":[{"type":"link","label":"Slack","href":"/docs/next/slack","docId":"docs/slack"},{"type":"link","label":"Town Halls","href":"/docs/next/townhalls","docId":"docs/townhalls"},{"type":"link","label":"Code of Conduct","href":"/docs/next/code_of_conduct","docId":"docs/CODE_OF_CONDUCT"},{"type":"link","label":"Contributing","href":"/docs/next/contributing","docId":"docs/CONTRIBUTING"},{"type":"link","label":"Articles &amp; Talks","href":"/docs/next/links","docId":"docs/links"},{"type":"link","label":"RFC Process","href":"/docs/next/rfc","docId":"docs/rfc"},{"type":"link","label":"Reporting Security Issues","href":"/docs/next/security","docId":"SECURITY"}],"collapsible":true,"href":"/docs/next/category/community"},{"type":"category","label":"Release History","items":[{"type":"link","label":"Releases","href":"/docs/next/releases","docId":"releases"},{"type":"link","label":"Updating DataHub","href":"/docs/next/how/updating-datahub","docId":"docs/how/updating-datahub"}],"collapsed":true,"collapsible":true}]},"docs":{"datahub-frontend/README":{"id":"datahub-frontend/README","title":"datahub-frontend","description":"DataHub frontend is a Play service written in Java. It is served as a mid-tier","sidebar":"overviewSidebar"},"datahub-graphql-core/README":{"id":"datahub-graphql-core/README","title":"datahub-graphql-core","description":"DataHub GraphQL API is a shared lib module containing a GraphQL API on top of the GMS service layer. It exposes a graph-based representation","sidebar":"overviewSidebar"},"datahub-web-react/README":{"id":"datahub-web-react/README","title":"datahub-web-react","description":"About","sidebar":"overviewSidebar"},"datahub-web-react/src/app/analytics/README":{"id":"datahub-web-react/src/app/analytics/README","title":"DataHub React Analytics","description":"About","sidebar":"overviewSidebar"},"docker/airflow/local_airflow":{"id":"docker/airflow/local_airflow","title":"Running Airflow locally with DataHub","description":"This guide is currently unmaintained. As of 0.10.0 the container described is not published alongside the DataHub CLI. If you\'d like to use it, please reach out to us on the community slack."},"docker/datahub-upgrade/README":{"id":"docker/datahub-upgrade/README","title":"DataHub Upgrade Docker Image","description":"This container is used to automatically apply upgrades from one version of DataHub to another.","sidebar":"overviewSidebar"},"docker/README":{"id":"docker/README","title":"Deploying with Docker","description":"Prerequisites","sidebar":"overviewSidebar"},"docs/act-on-metadata":{"id":"docs/act-on-metadata","title":"Act on Metadata Overview","description":"DataHub\'s metadata infrastructure is stream-oriented, meaning that all changes in metadata are communicated and reflected within the platform within seconds.","sidebar":"overviewSidebar"},"docs/act-on-metadata/impact-analysis":{"id":"docs/act-on-metadata/impact-analysis","title":"About DataHub Lineage Impact Analysis","description":"Lineage Impact Analysis is a powerful workflow for understanding the complete set of upstream and downstream dependencies of a Dataset, Dashboard, Chart, and many other DataHub Entities.","sidebar":"overviewSidebar"},"docs/actions/actions/executor":{"id":"docs/actions/actions/executor","title":"Ingestion Executor","description":"Certified","sidebar":"overviewSidebar"},"docs/actions/actions/hello_world":{"id":"docs/actions/actions/hello_world","title":"Hello World","description":"Certified","sidebar":"overviewSidebar"},"docs/actions/actions/slack":{"id":"docs/actions/actions/slack","title":"Slack","description":"|  |  |","sidebar":"overviewSidebar"},"docs/actions/actions/teams":{"id":"docs/actions/actions/teams","title":"Microsoft Teams","description":"|  |  |","sidebar":"overviewSidebar"},"docs/actions/concepts":{"id":"docs/actions/concepts","title":"Concepts","description":"The Actions framework includes pluggable components for filtering, transforming, and reacting to important DataHub, such as","sidebar":"overviewSidebar"},"docs/actions/events/entity-change-event":{"id":"docs/actions/events/entity-change-event","title":"Entity Change Event V1","description":"Event Type","sidebar":"overviewSidebar"},"docs/actions/events/metadata-change-log-event":{"id":"docs/actions/events/metadata-change-log-event","title":"Metadata Change Log Event V1","description":"Event Type","sidebar":"overviewSidebar"},"docs/actions/guides/developing-a-transformer":{"id":"docs/actions/guides/developing-a-transformer","title":"Developing a Transformer","description":"In this guide, we will outline each step to developing a custom Transformer for the DataHub Actions Framework.","sidebar":"overviewSidebar"},"docs/actions/guides/developing-an-action":{"id":"docs/actions/guides/developing-an-action","title":"Developing an Action","description":"In this guide, we will outline each step to developing a Action for the DataHub Actions Framework.","sidebar":"overviewSidebar"},"docs/actions/quickstart":{"id":"docs/actions/quickstart","title":"Quickstart","description":"Prerequisites","sidebar":"overviewSidebar"},"docs/actions/README":{"id":"docs/actions/README","title":"Introduction","description":"Welcome to DataHub Actions! The Actions framework makes responding to realtime changes in your Metadata Graph easy, enabling you to seamlessly integrate DataHub into a broader events-based architecture.","sidebar":"overviewSidebar"},"docs/actions/sources/kafka-event-source":{"id":"docs/actions/sources/kafka-event-source","title":"Kafka Event Source","description":"Overview","sidebar":"overviewSidebar"},"docs/advanced/aspect-versioning":{"id":"docs/advanced/aspect-versioning","title":"Aspect Versioning","description":"As each version of metadata aspect is immutable, any update to an existing aspect results in the creation of a new version. Typically one would expect the version number increases sequentially with the largest version number being the latest version, i.e. v1 (oldest), v2 (second oldest), ..., vN (latest). However, this approach results in major challenges in both rest.li modeling &amp; transaction isolation and therefore requires a rethinking.","sidebar":"overviewSidebar"},"docs/advanced/backfilling":{"id":"docs/advanced/backfilling","title":"Backfilling Search Index &amp; Graph DB","description":"WIP"},"docs/advanced/browse-paths-upgrade":{"id":"docs/advanced/browse-paths-upgrade","title":"Browse Paths Upgrade (August 2022)","description":"Background","sidebar":"overviewSidebar"},"docs/advanced/db-retention":{"id":"docs/advanced/db-retention","title":"Configuring Database Retention","description":"Goal","sidebar":"overviewSidebar"},"docs/advanced/derived-aspects":{"id":"docs/advanced/derived-aspects","title":"Derived Aspects","description":"WIP"},"docs/advanced/entity-hierarchy":{"id":"docs/advanced/entity-hierarchy","title":"Entity Hierarchy","description":"WIP"},"docs/advanced/field-path-spec-v2":{"id":"docs/advanced/field-path-spec-v2","title":"SchemaFieldPath Specification (Version 2)","description":"This document outlines the formal specification for the fieldPath member of","sidebar":"overviewSidebar"},"docs/advanced/high-cardinality":{"id":"docs/advanced/high-cardinality","title":"High Cardinality Relationships","description":"As explained in What is a Relationship, the raw metadata for forming relationships is captured directly inside of a Metadata Aspect. The most natural way to model this is using an array, e.g. a group membership aspect contains an array of user URNs. However, this poses some challenges when the cardinality of the relationship is expected to be large (say, greater than 10,000). The aspect becomes large in size, which leads to slow update and retrieval. It may even exceed the underlying limit of the document store, which is often in the range of a few MBs. Furthermore, sending large messages (&gt; 1MB) over Kafka requires special tuning and is generally discouraged."},"docs/advanced/mcp-mcl":{"id":"docs/advanced/mcp-mcl","title":"MetadataChangeProposal &amp; MetadataChangeLog Events","description":"Overview &amp; Vision","sidebar":"overviewSidebar"},"docs/advanced/monitoring":{"id":"docs/advanced/monitoring","title":"Monitoring DataHub","description":"Monitoring DataHub\'s system components is critical for operating and improving DataHub. This doc explains how to add","sidebar":"overviewSidebar"},"docs/advanced/no-code-modeling":{"id":"docs/advanced/no-code-modeling","title":"No Code Metadata","description":"Summary of changes","sidebar":"overviewSidebar"},"docs/advanced/no-code-upgrade":{"id":"docs/advanced/no-code-upgrade","title":"No Code Upgrade (In-Place Migration Guide)","description":"Summary of changes","sidebar":"overviewSidebar"},"docs/advanced/partial-update":{"id":"docs/advanced/partial-update","title":"Supporting Partial Aspect Update","description":"WIP"},"docs/advanced/patch":{"id":"docs/advanced/patch","title":"But First, Semantics: Upsert versus Patch","description":"Why Would You Use Patch","sidebar":"overviewSidebar"},"docs/advanced/pdl-best-practices":{"id":"docs/advanced/pdl-best-practices","title":"PDL Best Practices","description":"WIP"},"docs/api/datahub-apis":{"id":"docs/api/datahub-apis","title":"Which DataHub API is for me?","description":"DataHub supplys several APIs to manipulate metadata on the platform. These are our most-to-least recommended approaches:","sidebar":"overviewSidebar"},"docs/api/graphql/getting-started":{"id":"docs/api/graphql/getting-started","title":"Getting Started With GraphQL","description":"Reading an Entity: Queries","sidebar":"overviewSidebar"},"docs/api/graphql/graphql-endpoint-development":{"id":"docs/api/graphql/graphql-endpoint-development","title":"Creating a New GraphQL Endpoint in GMS","description":"This guide will walk you through how to add a new GraphQL endpoint in GMS.","sidebar":"overviewSidebar"},"docs/api/graphql/how-to-set-up-graphql":{"id":"docs/api/graphql/how-to-set-up-graphql","title":"How To Set Up GraphQL","description":"Preparing Local Datahub Deployment","sidebar":"overviewSidebar"},"docs/api/graphql/overview":{"id":"docs/api/graphql/overview","title":"DataHub GraphQL API","description":"DataHub provides a rich graphql API for programmatically interacting with the Entities &amp; Relationships comprising your organization\'s Metadata Graph.","sidebar":"overviewSidebar"},"docs/api/graphql/token-management":{"id":"docs/api/graphql/token-management","title":"Access Token Management","description":"DataHub provides the following graphql endpoints for managing Access Tokens. In this page you will see examples as well","sidebar":"overviewSidebar"},"docs/api/openapi/openapi-usage-guide":{"id":"docs/api/openapi/openapi-usage-guide","title":"DataHub OpenAPI Guide","description":"Why OpenAPI","sidebar":"overviewSidebar"},"docs/api/restli/evaluate-tests":{"id":"docs/api/restli/evaluate-tests","title":"Evaluate Tests Endpoint","description":"You can do a HTTP POST request to /gms/test?action=evaluate endpoint with the urn as part of JSON Payload to run metadata tests for the particular URN.","sidebar":"overviewSidebar"},"docs/api/restli/get-elastic-task-status":{"id":"docs/api/restli/get-elastic-task-status","title":"Get ElasticSearch Task Status Endpoint","description":"You can do a HTTP POST request to /gms/operations?action=getEsTaskStatus endpoint to see the status of the input task running in ElasticSearch. For example, the task ID given by the truncateTimeseriesAspect endpoint. The task ID can be passed in as a string with node name and task ID separated by a colon (as is output by the previous API), or the node name and task ID parameters separately.","sidebar":"overviewSidebar"},"docs/api/restli/get-index-sizes":{"id":"docs/api/restli/get-index-sizes","title":"Get Index Sizes Endpoint","description":"You can do a HTTP POST request to /gms/operations?action=getIndexSizes endpoint with no parameters to see the size of indices in ElasticSearch. For now, only timeseries indices are supported, as they can grow indefinitely, and the truncateTimeseriesAspect endpoint is provided to clean up old entries. This endpoint can be used in conjunction with the cleanup endpoint to see which indices are the largest before truncation.","sidebar":"overviewSidebar"},"docs/api/restli/restli-overview":{"id":"docs/api/restli/restli-overview","title":"Rest.li API","description":"You can access basic documentation on the API endpoints by opening the /restli/docs endpoint in the browser.","sidebar":"overviewSidebar"},"docs/api/restli/restore-indices":{"id":"docs/api/restli/restore-indices","title":"Restore Indices Endpoint","description":"You can do a HTTP POST request to /gms/operations?action=restoreIndices endpoint with the urn as part of JSON Payload to restore indices for the particular URN, or with the urnLike regex to restore for batchSize URNs matching the pattern starting from start.","sidebar":"overviewSidebar"},"docs/api/restli/truncate-time-series-aspect":{"id":"docs/api/restli/truncate-time-series-aspect","title":"Truncate Timeseries Index Endpoint","description":"You can do a HTTP POST request to /gms/operations?action=truncateTimeseriesAspect endpoint to manage the size of a time series index by removing entries older than a certain timestamp, thereby truncating the table to only the entries needed, to save space. The getIndexSizes endpoint can be used to identify the largest indices. The output includes the index parameters needed for this function.","sidebar":"overviewSidebar"},"docs/api/tutorials/custom-properties":{"id":"docs/api/tutorials/custom-properties","title":"Custom Properties","description":"Why Would You Use Custom Properties on Datasets?","sidebar":"overviewSidebar"},"docs/api/tutorials/datasets":{"id":"docs/api/tutorials/datasets","title":"Dataset","description":"Why Would You Use Datasets?","sidebar":"overviewSidebar"},"docs/api/tutorials/deprecation":{"id":"docs/api/tutorials/deprecation","title":"Deprecation","description":"Why Would You Deprecate Datasets?","sidebar":"overviewSidebar"},"docs/api/tutorials/descriptions":{"id":"docs/api/tutorials/descriptions","title":"Description","description":"Why Would You Use Description on Dataset?","sidebar":"overviewSidebar"},"docs/api/tutorials/domains":{"id":"docs/api/tutorials/domains","title":"Domains","description":"Why Would You Use Domains?","sidebar":"overviewSidebar"},"docs/api/tutorials/lineage":{"id":"docs/api/tutorials/lineage","title":"Lineage","description":"Why Would You Use Lineage?","sidebar":"overviewSidebar"},"docs/api/tutorials/ml":{"id":"docs/api/tutorials/ml","title":"ML System","description":"Why Would You Integrate ML System with DataHub?","sidebar":"overviewSidebar"},"docs/api/tutorials/owners":{"id":"docs/api/tutorials/owners","title":"Ownership","description":"Why Would You Use Users and Groups?","sidebar":"overviewSidebar"},"docs/api/tutorials/tags":{"id":"docs/api/tutorials/tags","title":"Tags","description":"Why Would You Use Tags on Datasets?","sidebar":"overviewSidebar"},"docs/api/tutorials/terms":{"id":"docs/api/tutorials/terms","title":"Terms","description":"Why Would You Use Terms on Datasets?","sidebar":"overviewSidebar"},"docs/architecture/architecture":{"id":"docs/architecture/architecture","title":"Overview","description":"DataHub is a 3rd generation Metadata Platform that enables Data Discovery, Collaboration, Governance, and end-to-end Observability","sidebar":"overviewSidebar"},"docs/architecture/docker-containers":{"id":"docs/architecture/docker-containers","title":"Docker Container Architecture","description":"When running DataHub via docker-compose. or helm, the following is a diagram of the containers involved","sidebar":"overviewSidebar"},"docs/architecture/metadata-ingestion":{"id":"docs/architecture/metadata-ingestion","title":"Ingestion Framework","description":"DataHub supports an extremely flexible ingestion architecture that can support push, pull, asynchronous and synchronous models.","sidebar":"overviewSidebar"},"docs/architecture/metadata-serving":{"id":"docs/architecture/metadata-serving","title":"Serving Tier","description":"The figure below shows the high-level system diagram for DataHub\'s Serving Tier.","sidebar":"overviewSidebar"},"docs/authentication/changing-default-credentials":{"id":"docs/authentication/changing-default-credentials","title":"Changing the default user credentials","description":"Default User Credential","sidebar":"overviewSidebar"},"docs/authentication/concepts":{"id":"docs/authentication/concepts","title":"Concepts &amp; Key Components","description":"We introduced a few important concepts to the Metadata Service to make authentication work:","sidebar":"overviewSidebar"},"docs/authentication/guides/add-users":{"id":"docs/authentication/guides/add-users","title":"Onboarding Users to DataHub","description":"New user accounts can be provisioned on DataHub in 3 ways:","sidebar":"overviewSidebar"},"docs/authentication/guides/jaas":{"id":"docs/authentication/guides/jaas","title":"JaaS Authentication","description":"Overview","sidebar":"overviewSidebar"},"docs/authentication/guides/sso/configure-oidc-behind-proxy":{"id":"docs/authentication/guides/sso/configure-oidc-behind-proxy","title":"OIDC Proxy Configuration","description":"Authored on 22/08/2023","sidebar":"overviewSidebar"},"docs/authentication/guides/sso/configure-oidc-react":{"id":"docs/authentication/guides/sso/configure-oidc-react","title":"OIDC Authentication","description":"The DataHub React application supports OIDC authentication built on top of the Pac4j Play library.","sidebar":"overviewSidebar"},"docs/authentication/introducing-metadata-service-authentication":{"id":"docs/authentication/introducing-metadata-service-authentication","title":"Metadata Service Authentication","description":"Introduction","sidebar":"overviewSidebar"},"docs/authentication/personal-access-tokens":{"id":"docs/authentication/personal-access-tokens","title":"About DataHub Personal Access Tokens","description":"Personal Access Tokens, or PATs for short, allow users to represent themselves in code and programmatically use DataHub\'s APIs in deployments where security is a concern.","sidebar":"overviewSidebar"},"docs/authentication/README":{"id":"docs/authentication/README","title":"Overview","description":"Authentication is the process of verifying the identity of a user or service. There are two","sidebar":"overviewSidebar"},"docs/authorization/access-policies-guide":{"id":"docs/authorization/access-policies-guide","title":"About DataHub Access Policies","description":"Access Policies define who can do what to which resources. In conjunction with Roles, Access Policies determine what users are allowed to do on DataHub.","sidebar":"overviewSidebar"},"docs/authorization/groups":{"id":"docs/authorization/groups","title":"Authorization using Groups","description":"Introduction","sidebar":"overviewSidebar"},"docs/authorization/policies":{"id":"docs/authorization/policies","title":"Policies Guide","description":"Introduction","sidebar":"overviewSidebar"},"docs/authorization/README":{"id":"docs/authorization/README","title":"Overview","description":"Authorization specifies what accesses an authenticated user has within a system.","sidebar":"overviewSidebar"},"docs/authorization/roles":{"id":"docs/authorization/roles","title":"About DataHub Roles","description":"DataHub provides the ability to use Roles to manage permissions.","sidebar":"overviewSidebar"},"docs/browseV2/browse-paths-v2":{"id":"docs/browseV2/browse-paths-v2","title":"Generating Browse Paths (V2)","description":"Introduction","sidebar":"overviewSidebar"},"docs/cli":{"id":"docs/cli","title":"DataHub CLI","description":"DataHub comes with a friendly cli called datahub that allows you to perform a lot of common operations using just the command line. Acryl Data maintains the pypi package for datahub.","sidebar":"overviewSidebar"},"docs/CODE_OF_CONDUCT":{"id":"docs/CODE_OF_CONDUCT","title":"Code of Conduct","description":"Our Pledge","sidebar":"overviewSidebar"},"docs/components":{"id":"docs/components","title":"Components","description":"The DataHub platform consists of the components shown in the following diagram.","sidebar":"overviewSidebar"},"docs/CONTRIBUTING":{"id":"docs/CONTRIBUTING","title":"Contributing","description":"We always welcome contributions to help make DataHub better. Take a moment to read this document if you would like to contribute.","sidebar":"overviewSidebar"},"docs/datahub_lite":{"id":"docs/datahub_lite","title":"DataHub Lite (Experimental)","description":"What is it?","sidebar":"overviewSidebar"},"docs/dataproducts":{"id":"docs/dataproducts","title":"Data Products","description":"\ud83e\udd1d Version compatibility","sidebar":"overviewSidebar"},"docs/deploy/aws":{"id":"docs/deploy/aws","title":"Deploying to AWS","description":"The following is a set of instructions to quickstart DataHub on AWS Elastic Kubernetes Service (EKS). Note, the guide","sidebar":"overviewSidebar"},"docs/deploy/azure":{"id":"docs/deploy/azure","title":"Deploying to Azure","description":"The following is a set of instructions to quickstart DataHub on Azure Kubernetes Service (AKS). Note, the guide","sidebar":"overviewSidebar"},"docs/deploy/confluent-cloud":{"id":"docs/deploy/confluent-cloud","title":"Integrating with Confluent Cloud","description":"DataHub provides the ability to easily leverage Confluent Cloud as your Kafka provider. To do so, you\'ll need to configure DataHub to talk to a broker and schema registry hosted by Confluent.","sidebar":"overviewSidebar"},"docs/deploy/environment-vars":{"id":"docs/deploy/environment-vars","title":"Deployment Environment Variables","description":"The following is a summary of a few important environment variables which expose various levers which control how","sidebar":"overviewSidebar"},"docs/deploy/gcp":{"id":"docs/deploy/gcp","title":"Deploying to GCP","description":"The following is a set of instructions to quickstart DataHub on GCP Google Kubernetes Engine (GKE). Note, the guide","sidebar":"overviewSidebar"},"docs/deploy/kubernetes":{"id":"docs/deploy/kubernetes","title":"Deploying with Kubernetes","description":"Introduction","sidebar":"overviewSidebar"},"docs/deploy/telemetry":{"id":"docs/deploy/telemetry","title":"DataHub Telemetry","description":"Overview of DataHub Telemetry","sidebar":"overviewSidebar"},"docs/dev-guides/timeline":{"id":"docs/dev-guides/timeline","title":"Timeline API","description":"The Timeline API supports viewing version history of schemas, documentation, tags, glossary terms, and other updates","sidebar":"overviewSidebar"},"docs/developers":{"id":"docs/developers","title":"Local Development","description":"Requirements","sidebar":"overviewSidebar"},"docs/docker/development":{"id":"docs/docker/development","title":"Using Docker Images During Development","description":"We\'ve created a special docker-compose.dev.yml override file that should configure docker images to be easier to use","sidebar":"overviewSidebar"},"docs/domains":{"id":"docs/domains","title":"About DataHub Domains","description":"Starting in version 0.8.25, DataHub supports grouping data assets into logical collections called Domains. Domains are curated, top-level folders or categories where related assets can be explicitly grouped. Management of Domains can be centralized, or distributed out to Domain owners Currently, an asset can belong to only one Domain at a time.","sidebar":"overviewSidebar"},"docs/features":{"id":"docs/features","title":"What is DataHub?","description":"DataHub is a modern data catalog built to enable end-to-end data discovery, data observability, and data governance.","sidebar":"overviewSidebar"},"docs/features/dataset-usage-and-query-history":{"id":"docs/features/dataset-usage-and-query-history","title":"About DataHub Dataset Usage &amp; Query History","description":"Dataset Usage &amp; Query History can give dataset-level information about the top queries which referenced a dataset.","sidebar":"overviewSidebar"},"docs/features/feature-guides/ui-lineage":{"id":"docs/features/feature-guides/ui-lineage","title":"Managing Lineage via UI","description":"Viewing lineage","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/athena":{"id":"docs/generated/ingestion/sources/athena","title":"Athena","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/azure-ad":{"id":"docs/generated/ingestion/sources/azure-ad","title":"Azure AD","description":"Extracting DataHub Users","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/bigquery":{"id":"docs/generated/ingestion/sources/bigquery","title":"BigQuery","description":"Ingesting metadata from BigQuery requires using the bigquery module.","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/business-glossary":{"id":"docs/generated/ingestion/sources/business-glossary","title":"Business Glossary","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/clickhouse":{"id":"docs/generated/ingestion/sources/clickhouse","title":"ClickHouse","description":"There are 2 sources that provide integration with ClickHouse","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/csv":{"id":"docs/generated/ingestion/sources/csv","title":"CSV","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/databricks":{"id":"docs/generated/ingestion/sources/databricks","title":"Databricks","description":"DataHub supports integration with Databricks ecosystem using a multitude of connectors, depending on your exact setup.","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/datahub":{"id":"docs/generated/ingestion/sources/datahub","title":"DataHub","description":"Migrate data from one DataHub instance to another.","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/dbt":{"id":"docs/generated/ingestion/sources/dbt","title":"dbt","description":"There are 2 sources that provide integration with dbt","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/delta-lake":{"id":"docs/generated/ingestion/sources/delta-lake","title":"Delta Lake","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/demo-data":{"id":"docs/generated/ingestion/sources/demo-data","title":"Demo Data","description":"This source loads sample data into DataHub. It is intended for demo and testing purposes only.","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/druid":{"id":"docs/generated/ingestion/sources/druid","title":"Druid","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/dynamodb":{"id":"docs/generated/ingestion/sources/dynamodb","title":"DynamoDB","description":"Testing","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/elasticsearch":{"id":"docs/generated/ingestion/sources/elasticsearch","title":"Elasticsearch","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/feast":{"id":"docs/generated/ingestion/sources/feast","title":"Feast","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/file":{"id":"docs/generated/ingestion/sources/file","title":"File","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/file-based-lineage":{"id":"docs/generated/ingestion/sources/file-based-lineage","title":"File Based Lineage","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/fivetran":{"id":"docs/generated/ingestion/sources/fivetran","title":"Fivetran","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/gcs":{"id":"docs/generated/ingestion/sources/gcs","title":"Google Cloud Storage","description":"This connector ingests Google Cloud Storage datasets into DataHub. It allows mapping an individual file or a folder of files to a dataset in DataHub.","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/glue":{"id":"docs/generated/ingestion/sources/glue","title":"Glue","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/hana":{"id":"docs/generated/ingestion/sources/hana","title":"SAP HANA","description":"Testing","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/hive":{"id":"docs/generated/ingestion/sources/hive","title":"Hive","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/iceberg":{"id":"docs/generated/ingestion/sources/iceberg","title":"Iceberg","description":"Testing","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/json-schema":{"id":"docs/generated/ingestion/sources/json-schema","title":"JSON Schemas","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/kafka":{"id":"docs/generated/ingestion/sources/kafka","title":"Kafka","description":"Extract Topics &amp; Schemas from Apache Kafka or Confluent Cloud.","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/kafka-connect":{"id":"docs/generated/ingestion/sources/kafka-connect","title":"Kafka Connect","description":"Integration Details","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/ldap":{"id":"docs/generated/ingestion/sources/ldap","title":"LDAP","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/looker":{"id":"docs/generated/ingestion/sources/looker","title":"Looker","description":"There are 2 sources that provide integration with Looker","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/mariadb":{"id":"docs/generated/ingestion/sources/mariadb","title":"MariaDB","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/metabase":{"id":"docs/generated/ingestion/sources/metabase","title":"Metabase","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/mlflow":{"id":"docs/generated/ingestion/sources/mlflow","title":"MLflow","description":"Testing","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/mode":{"id":"docs/generated/ingestion/sources/mode","title":"Mode","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/mongodb":{"id":"docs/generated/ingestion/sources/mongodb","title":"MongoDB","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/mssql":{"id":"docs/generated/ingestion/sources/mssql","title":"Microsoft SQL Server","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/mysql":{"id":"docs/generated/ingestion/sources/mysql","title":"MySQL","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/nifi":{"id":"docs/generated/ingestion/sources/nifi","title":"NiFi","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/okta":{"id":"docs/generated/ingestion/sources/okta","title":"Okta","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/openapi":{"id":"docs/generated/ingestion/sources/openapi","title":"OpenAPI","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/oracle":{"id":"docs/generated/ingestion/sources/oracle","title":"Oracle","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/postgres":{"id":"docs/generated/ingestion/sources/postgres","title":"Postgres","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/powerbi":{"id":"docs/generated/ingestion/sources/powerbi","title":"PowerBI","description":"There are 2 sources that provide integration with PowerBI","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/presto":{"id":"docs/generated/ingestion/sources/presto","title":"Presto","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/presto-on-hive":{"id":"docs/generated/ingestion/sources/presto-on-hive","title":"Presto on Hive","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/pulsar":{"id":"docs/generated/ingestion/sources/pulsar","title":"Pulsar","description":"Integration Details","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/redash":{"id":"docs/generated/ingestion/sources/redash","title":"Redash","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/redshift":{"id":"docs/generated/ingestion/sources/redshift","title":"Redshift","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/s3":{"id":"docs/generated/ingestion/sources/s3","title":"S3 Data Lake","description":"This connector ingests S3 datasets into DataHub. It allows mapping an individual file or a folder of files to a dataset in DataHub.","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/sagemaker":{"id":"docs/generated/ingestion/sources/sagemaker","title":"SageMaker","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/salesforce":{"id":"docs/generated/ingestion/sources/salesforce","title":"Salesforce","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/snowflake":{"id":"docs/generated/ingestion/sources/snowflake","title":"Snowflake","description":"Snowflake Ingestion through the UI","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/sql-queries":{"id":"docs/generated/ingestion/sources/sql-queries","title":"SQL Queries","description":"Incubating","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/sqlalchemy":{"id":"docs/generated/ingestion/sources/sqlalchemy","title":"SQLAlchemy","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/superset":{"id":"docs/generated/ingestion/sources/superset","title":"Superset","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/tableau":{"id":"docs/generated/ingestion/sources/tableau","title":"Tableau","description":"Certified","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/teradata":{"id":"docs/generated/ingestion/sources/teradata","title":"Teradata","description":"Testing","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/trino":{"id":"docs/generated/ingestion/sources/trino","title":"Trino","description":"There are 2 sources that provide integration with Trino","sidebar":"overviewSidebar"},"docs/generated/ingestion/sources/vertica":{"id":"docs/generated/ingestion/sources/vertica","title":"Vertica","description":"Integration Details","sidebar":"overviewSidebar"},"docs/generated/lineage/lineage-feature-guide":{"id":"docs/generated/lineage/lineage-feature-guide","title":"About DataHub Lineage","description":"Lineage is used to capture data dependencies within an organization. It allows you to track the inputs from which a data asset is derived, along with the data assets that depend on it downstream.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/assertion":{"id":"docs/generated/metamodel/entities/assertion","title":"Assertion","description":"Assertion entity represents a data quality rule applied on dataset.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/chart":{"id":"docs/generated/metamodel/entities/chart","title":"Chart","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/container":{"id":"docs/generated/metamodel/entities/container","title":"Container","description":"A container of related data assets.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/corpGroup":{"id":"docs/generated/metamodel/entities/corpGroup","title":"CorpGroup","description":"CorpGroup represents an identity of a group of users in the enterprise.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/corpuser":{"id":"docs/generated/metamodel/entities/corpuser","title":"Corpuser","description":"CorpUser represents an identity of a person (or an account) in the enterprise.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dashboard":{"id":"docs/generated/metamodel/entities/dashboard","title":"Dashboard","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataContract":{"id":"docs/generated/metamodel/entities/dataContract","title":"DataContract","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataFlow":{"id":"docs/generated/metamodel/entities/dataFlow","title":"DataFlow","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubAccessToken":{"id":"docs/generated/metamodel/entities/dataHubAccessToken","title":"DataHubAccessToken","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubExecutionRequest":{"id":"docs/generated/metamodel/entities/dataHubExecutionRequest","title":"DataHubExecutionRequest","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubIngestionSource":{"id":"docs/generated/metamodel/entities/dataHubIngestionSource","title":"DataHubIngestionSource","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubPolicy":{"id":"docs/generated/metamodel/entities/dataHubPolicy","title":"DataHubPolicy","description":"DataHub Policies represent access policies granted to users or groups on metadata operations like edit, view etc.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubRetention":{"id":"docs/generated/metamodel/entities/dataHubRetention","title":"DataHubRetention","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubRole":{"id":"docs/generated/metamodel/entities/dataHubRole","title":"DataHubRole","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubSecret":{"id":"docs/generated/metamodel/entities/dataHubSecret","title":"DataHubSecret","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubStepState":{"id":"docs/generated/metamodel/entities/dataHubStepState","title":"DataHubStepState","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubUpgrade":{"id":"docs/generated/metamodel/entities/dataHubUpgrade","title":"DataHubUpgrade","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataHubView":{"id":"docs/generated/metamodel/entities/dataHubView","title":"DataHubView","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataJob":{"id":"docs/generated/metamodel/entities/dataJob","title":"DataJob","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataPlatform":{"id":"docs/generated/metamodel/entities/dataPlatform","title":"Data Platform","description":"Data Platforms are systems or tools that contain Datasets, Dashboards, Charts, and all other kinds of data assets modeled in the metadata graph.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataPlatformInstance":{"id":"docs/generated/metamodel/entities/dataPlatformInstance","title":"DataPlatformInstance","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataProcess":{"id":"docs/generated/metamodel/entities/dataProcess","title":"DataProcess","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataProcessInstance":{"id":"docs/generated/metamodel/entities/dataProcessInstance","title":"DataProcessInstance","description":"DataProcessInstance represents an instance of a datajob/jobflow run","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataProduct":{"id":"docs/generated/metamodel/entities/dataProduct","title":"DataProduct","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/dataset":{"id":"docs/generated/metamodel/entities/dataset","title":"Dataset","description":"The dataset entity is one the most important entities in the metadata model. They represent collections of data that are typically represented as Tables or Views in a database (e.g. BigQuery, Snowflake, Redshift etc.), Streams in a stream-processing environment (Kafka, Pulsar etc.), bundles of data found as Files or Folders in data lake systems (S3, ADLS, etc.).","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/domain":{"id":"docs/generated/metamodel/entities/domain","title":"Domain","description":"A data domain within an organization.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/globalSettings":{"id":"docs/generated/metamodel/entities/globalSettings","title":"GlobalSettings","description":"Global settings for an the platform","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/glossaryNode":{"id":"docs/generated/metamodel/entities/glossaryNode","title":"GlossaryNode","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/glossaryTerm":{"id":"docs/generated/metamodel/entities/glossaryTerm","title":"GlossaryTerm","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/inviteToken":{"id":"docs/generated/metamodel/entities/inviteToken","title":"InviteToken","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/mlFeature":{"id":"docs/generated/metamodel/entities/mlFeature","title":"MlFeature","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/mlFeatureTable":{"id":"docs/generated/metamodel/entities/mlFeatureTable","title":"MlFeatureTable","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/mlModel":{"id":"docs/generated/metamodel/entities/mlModel","title":"MlModel","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/mlModelDeployment":{"id":"docs/generated/metamodel/entities/mlModelDeployment","title":"MlModelDeployment","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/mlModelGroup":{"id":"docs/generated/metamodel/entities/mlModelGroup","title":"MlModelGroup","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/mlPrimaryKey":{"id":"docs/generated/metamodel/entities/mlPrimaryKey","title":"MlPrimaryKey","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/notebook":{"id":"docs/generated/metamodel/entities/notebook","title":"Notebook","description":"\u26a0\ufe0f Notice: The Notebook entity is under active community development and IS NOT YET fully supported on the DataHub web application.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/ownershipType":{"id":"docs/generated/metamodel/entities/ownershipType","title":"OwnershipType","description":"Ownership Type represents a user-created ownership category for a person or group who is responsible for an asset.","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/post":{"id":"docs/generated/metamodel/entities/post","title":"Post","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/query":{"id":"docs/generated/metamodel/entities/query","title":"Query","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/role":{"id":"docs/generated/metamodel/entities/role","title":"Role","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/schemaField":{"id":"docs/generated/metamodel/entities/schemaField","title":"SchemaField","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/tag":{"id":"docs/generated/metamodel/entities/tag","title":"Tag","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/telemetry":{"id":"docs/generated/metamodel/entities/telemetry","title":"Telemetry","description":"Aspects","sidebar":"overviewSidebar"},"docs/generated/metamodel/entities/test":{"id":"docs/generated/metamodel/entities/test","title":"Test","description":"A DataHub test","sidebar":"overviewSidebar"},"docs/glossary/business-glossary":{"id":"docs/glossary/business-glossary","title":"Business Glossary","description":"Introduction","sidebar":"overviewSidebar"},"docs/how/add-custom-data-platform":{"id":"docs/how/add-custom-data-platform","title":"Adding a custom Dataset Data Platform","description":"A Data Platform represents a 3rd party system from which Metadata Entities are ingested from. Each Dataset that is ingested is associated with a single platform, for example MySQL, Snowflake, Redshift, or BigQuery.","sidebar":"overviewSidebar"},"docs/how/add-custom-ingestion-source":{"id":"docs/how/add-custom-ingestion-source","title":"Using a Custom Ingestion Source","description":"Adding a custom ingestion source is the easiest way to extend Datahubs ingestion framework to support source systems","sidebar":"overviewSidebar"},"docs/how/add-new-aspect":{"id":"docs/how/add-new-aspect","title":"How to add a new metadata aspect?","description":"Adding a new metadata aspect is one of the most common ways to extend an existing entity."},"docs/how/add-user-data":{"id":"docs/how/add-user-data","title":"Adding user metadata in DataHub","description":"This guide shares how you can add user metadata in DataHub. Usually you would want to use one of our sources for ingesting user metadata. But if there is no connector for your use case then you would want to use this guide."},"docs/how/backup-datahub":{"id":"docs/how/backup-datahub","title":"Taking backup of DataHub","description":"Production","sidebar":"overviewSidebar"},"docs/how/configuring-authorization-with-apache-ranger":{"id":"docs/how/configuring-authorization-with-apache-ranger","title":"Configuring Authorization with Apache Ranger","description":"DataHub integration with Apache Ranger allows DataHub Authorization policies to be controlled inside Apache Ranger.","sidebar":"overviewSidebar"},"docs/how/delete-metadata":{"id":"docs/how/delete-metadata","title":"Removing Metadata from DataHub","description":"To follow this guide, you\'ll need the DataHub CLI.","sidebar":"overviewSidebar"},"docs/how/extract-container-logs":{"id":"docs/how/extract-container-logs","title":"How to Extract Logs from DataHub Containers","description":"DataHub containers, datahub GMS (backend server) and datahub frontend (UI server), write log files to the local container filesystem. To extract these logs, you\'ll need to get them from inside the container where the services are running.","sidebar":"overviewSidebar"},"docs/how/jattach-guide":{"id":"docs/how/jattach-guide","title":"Debugging by Jattach","description":"We have installed jattach in Docker image of datahub-gms, datahub-mae-consumer, datahub-mce-consumer","sidebar":"overviewSidebar"},"docs/how/kafka-config":{"id":"docs/how/kafka-config","title":"Configuring Kafka","description":"DataHub requires Kafka to operate. Kafka is used as a durable log that can be used to store inbound","sidebar":"overviewSidebar"},"docs/how/migrating-graph-service-implementation":{"id":"docs/how/migrating-graph-service-implementation","title":"Migrate Graph Service Implementation to Elasticsearch","description":"We currently support either Elasticsearch or Neo4j as backend implementations for the graph service. We recommend","sidebar":"overviewSidebar"},"docs/how/restore-indices":{"id":"docs/how/restore-indices","title":"Restoring Search and Graph Indices from Local Database","description":"If search or graph services go down or you have made changes to them that require reindexing, you can restore them from","sidebar":"overviewSidebar"},"docs/how/search":{"id":"docs/how/search","title":"About DataHub Search","description":"The search bar is an important mechanism for discovering data assets in DataHub. From the search bar, you can find Datasets, Columns, Dashboards, Charts, Data Pipelines, and more. Simply type in a term and press \'enter\'.","sidebar":"overviewSidebar"},"docs/how/ui-tabs-guide":{"id":"docs/how/ui-tabs-guide","title":"UI Tabs Guide","description":"Some of the tabs in the UI might not be enabled by default. This guide is supposed to tell Admins of DataHub how to enable those UI tabs."},"docs/how/updating-datahub":{"id":"docs/how/updating-datahub","title":"Updating DataHub","description":"This file documents any backwards-incompatible changes in DataHub and assists people when migrating to a new version.","sidebar":"overviewSidebar"},"docs/lineage/airflow":{"id":"docs/lineage/airflow","title":"Airflow Integration","description":"If you\'re looking to schedule DataHub ingestion using Airflow, see the guide on scheduling ingestion with Airflow.","sidebar":"overviewSidebar"},"docs/links":{"id":"docs/links","title":"Articles &amp; Talks","description":"Overviews","sidebar":"overviewSidebar"},"docs/managed-datahub/approval-workflows":{"id":"docs/managed-datahub/approval-workflows","title":"About DataHub Approval Workflows","description":"Overview","sidebar":"overviewSidebar"},"docs/managed-datahub/chrome-extension":{"id":"docs/managed-datahub/chrome-extension","title":"Acryl DataHub Chrome Extension","description":"Learn how to upload and use the Acryl DataHub Chrome extension (beta) locally before it\'s available on the Chrome store.","sidebar":"overviewSidebar"},"docs/managed-datahub/datahub-api/entity-events-api":{"id":"docs/managed-datahub/datahub-api/entity-events-api","title":"Entity Events API","description":"This guide details the Entity Events API, which allows you to take action when things change on DataHub.","sidebar":"overviewSidebar"},"docs/managed-datahub/datahub-api/graphql-api/getting-started":{"id":"docs/managed-datahub/datahub-api/graphql-api/getting-started","title":"Getting Started","description":"Getting started with the DataHub GraphQL API.","sidebar":"overviewSidebar"},"docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta":{"id":"docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta","title":"Incidents API (Beta)","description":"This page provides an overview of working with the DataHub Incidents API.","sidebar":"overviewSidebar"},"docs/managed-datahub/integrations/aws-privatelink":{"id":"docs/managed-datahub/integrations/aws-privatelink","title":"AWS PrivateLink","description":"If you require a private connection between the provisioned DataHub instance and your own existing AWS account, Acryl supports using AWS PrivateLink in order to complete this private connection.","sidebar":"overviewSidebar"},"docs/managed-datahub/integrations/oidc-sso-integration":{"id":"docs/managed-datahub/integrations/oidc-sso-integration","title":"OIDC SSO Integration","description":"This page will help you set up OIDC SSO with your identity provider to log into Acryl Data","sidebar":"overviewSidebar"},"docs/managed-datahub/managed-datahub-overview":{"id":"docs/managed-datahub/managed-datahub-overview","title":"Managed DataHub Exclusives","description":"Acryl DataHub offers a slew of additional features on top of the normal OSS project.","sidebar":"overviewSidebar"},"docs/managed-datahub/metadata-ingestion-with-acryl/ingestion":{"id":"docs/managed-datahub/metadata-ingestion-with-acryl/ingestion","title":"Ingestion","description":"Acryl Metadata Ingestion functions similarly to that in open source DataHub. Sources are configured via the UI Ingestion or via a Recipe, ingestion recipes can be scheduled using your system of choice, and metadata can be pushed from anywhere.","sidebar":"overviewSidebar"},"docs/managed-datahub/observe/column-assertions":{"id":"docs/managed-datahub/observe/column-assertions","title":"Column Assertions","description":"This page provides an overview of working with DataHub Column Assertions","sidebar":"overviewSidebar"},"docs/managed-datahub/observe/custom-sql-assertions":{"id":"docs/managed-datahub/observe/custom-sql-assertions","title":"Custom SQL Assertions","description":"This page provides an overview of working with DataHub SQL Assertions","sidebar":"overviewSidebar"},"docs/managed-datahub/observe/freshness-assertions":{"id":"docs/managed-datahub/observe/freshness-assertions","title":"Freshness Assertions","description":"This page provides an overview of working with DataHub Freshness Assertions","sidebar":"overviewSidebar"},"docs/managed-datahub/observe/volume-assertions":{"id":"docs/managed-datahub/observe/volume-assertions","title":"Volume Assertions","description":"This page provides an overview of working with DataHub Volume Assertions","sidebar":"overviewSidebar"},"docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge":{"id":"docs/managed-datahub/operator-guide/setting-up-events-api-on-aws-eventbridge","title":"Setting up Events API on AWS EventBridge","description":"This guide will walk through the configuration required to start receiving Acryl DataHub events via AWS EventBridge.","sidebar":"overviewSidebar"},"docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws":{"id":"docs/managed-datahub/operator-guide/setting-up-remote-ingestion-executor-on-aws","title":"Setting up Remote Ingestion Executor on AWS","description":"This page describes the steps required to configure a remote ingestion executor, which allows you to ingest metadata from private metadata sources using private credentials via the DataHub UI.","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_1_69":{"id":"docs/managed-datahub/release-notes/v_0_1_69","title":"v0.1.69","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_1_70":{"id":"docs/managed-datahub/release-notes/v_0_1_70","title":"v0.1.70","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_1_72":{"id":"docs/managed-datahub/release-notes/v_0_1_72","title":"v0.1.72","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_1_73":{"id":"docs/managed-datahub/release-notes/v_0_1_73","title":"v0.1.73","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_0":{"id":"docs/managed-datahub/release-notes/v_0_2_0","title":"v0.2.0","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_1":{"id":"docs/managed-datahub/release-notes/v_0_2_1","title":"v0.2.1","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_10":{"id":"docs/managed-datahub/release-notes/v_0_2_10","title":"v0.2.10","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_11":{"id":"docs/managed-datahub/release-notes/v_0_2_11","title":"v0.2.11","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_12":{"id":"docs/managed-datahub/release-notes/v_0_2_12","title":"v0.2.12","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_13":{"id":"docs/managed-datahub/release-notes/v_0_2_13","title":"v0.2.13","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_2":{"id":"docs/managed-datahub/release-notes/v_0_2_2","title":"v0.2.2","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_3":{"id":"docs/managed-datahub/release-notes/v_0_2_3","title":"v0.2.3","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_4":{"id":"docs/managed-datahub/release-notes/v_0_2_4","title":"v0.2.4","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_5":{"id":"docs/managed-datahub/release-notes/v_0_2_5","title":"v0.2.5","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_6":{"id":"docs/managed-datahub/release-notes/v_0_2_6","title":"v0.2.6","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_7":{"id":"docs/managed-datahub/release-notes/v_0_2_7","title":"v0.2.7","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_8":{"id":"docs/managed-datahub/release-notes/v_0_2_8","title":"v0.2.8","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/release-notes/v_0_2_9":{"id":"docs/managed-datahub/release-notes/v_0_2_9","title":"v0.2.9","description":"---","sidebar":"overviewSidebar"},"docs/managed-datahub/saas-slack-setup":{"id":"docs/managed-datahub/saas-slack-setup","title":"Configure Slack For Notifications","description":"Install the DataHub Slack App into your Slack workspace","sidebar":"overviewSidebar"},"docs/managed-datahub/subscription-and-notification":{"id":"docs/managed-datahub/subscription-and-notification","title":"Subscriptions &amp; Notifications","description":"DataHub\'s Subscriptions and Notifications feature gives you real-time change alerts on data assets of your choice.","sidebar":"overviewSidebar"},"docs/managed-datahub/welcome-acryl":{"id":"docs/managed-datahub/welcome-acryl","title":"Getting Started with Acryl DataHub","description":"Welcome to the Acryl DataHub! We at Acryl are on a mission to make data reliable by bringing clarity to the who, what, when, &amp; how of your data ecosystem. We\'re thrilled to be on this journey with you; and cannot wait to see what we build together!","sidebar":"overviewSidebar"},"docs/modeling/extending-the-metadata-model":{"id":"docs/modeling/extending-the-metadata-model","title":"Extending the Metadata Model","description":"You can extend the metadata model by either creating a new Entity or extending an existing one. Unsure if you need to","sidebar":"overviewSidebar"},"docs/modeling/metadata-model":{"id":"docs/modeling/metadata-model","title":"The Metadata Model","description":"DataHub takes a schema-first approach to modeling metadata. We use the open-source Pegasus schema language (PDL) extended with a custom set of annotations to model metadata. The DataHub storage, serving, indexing and ingestion layer operates directly on top of the metadata model and supports strong types all the way from the client to the storage layer.","sidebar":"overviewSidebar"},"docs/ownership/ownership-types":{"id":"docs/ownership/ownership-types","title":"Custom Ownership Types","description":"\ud83e\udd1d Version compatibility","sidebar":"overviewSidebar"},"docs/platform-instances":{"id":"docs/platform-instances","title":"Working With Platform Instances","description":"DataHub\'s metadata model for Datasets supports a three-part key currently:","sidebar":"overviewSidebar"},"docs/plugins":{"id":"docs/plugins","title":"Plugins Guide","description":"Plugins are way to enhance the basic DataHub functionality in a custom manner.","sidebar":"overviewSidebar"},"docs/posts":{"id":"docs/posts","title":"About DataHub Posts","description":"DataHub allows users to make Posts that can be displayed on the app. Currently, Posts are only supported on the Home Page, but may be extended to other surfaces of the app in the future. Posts can be used to accomplish the following:","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/bigquery/configuration":{"id":"docs/quick-ingestion-guides/bigquery/configuration","title":"Configuration","description":"Now that you have created a Service Account and Service Account Key in BigQuery in the prior step, it\'s now time to set up a connection via the DataHub UI.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/bigquery/overview":{"id":"docs/quick-ingestion-guides/bigquery/overview","title":"Overview","description":"What You Will Get Out of This Guide","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/bigquery/setup":{"id":"docs/quick-ingestion-guides/bigquery/setup","title":"Setup","description":"To configure ingestion from BigQuery, you\'ll need a Service Account configured with the proper permission sets and an associated Service Account Key.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/looker/configuration":{"id":"docs/quick-ingestion-guides/looker/configuration","title":"Configuration","description":"Now that you have created a DataHub-specific API key with the relevant access in the prior step, it\'s time to set up a connection via the DataHub UI.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/looker/overview":{"id":"docs/quick-ingestion-guides/looker/overview","title":"Overview","description":"What You Will Get Out of This Guide","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/looker/setup":{"id":"docs/quick-ingestion-guides/looker/setup","title":"Setup","description":"Looker Prerequisites","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/powerbi/configuration":{"id":"docs/quick-ingestion-guides/powerbi/configuration","title":"Configuration","description":"Now that you have created a DataHub specific Azure AD app with the relevant access in the prior step, it\'s now time to set up a connection via the DataHub UI.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/powerbi/overview":{"id":"docs/quick-ingestion-guides/powerbi/overview","title":"Overview","description":"What You Will Get Out of This Guide","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/powerbi/setup":{"id":"docs/quick-ingestion-guides/powerbi/setup","title":"Setup","description":"In order to configure ingestion from PowerBI, you\'ll first have to ensure you have an Azure AD app with permission to access the PowerBI resources.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/redshift/configuration":{"id":"docs/quick-ingestion-guides/redshift/configuration","title":"Configuration","description":"Now that you have created a DataHub user in Redshift in the prior step, it\'s time to set up a connection via the DataHub UI.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/redshift/overview":{"id":"docs/quick-ingestion-guides/redshift/overview","title":"Overview","description":"What You Will Get Out of This Guide","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/redshift/setup":{"id":"docs/quick-ingestion-guides/redshift/setup","title":"Setup","description":"To configure ingestion from Redshift, you\'ll need a User configured with the proper permission sets, and an associated.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/snowflake/configuration":{"id":"docs/quick-ingestion-guides/snowflake/configuration","title":"Configuration","description":"Now that you have created a DataHub-specific user with the relevant roles in Snowflake in the prior step, it\'s now time to set up a connection via the DataHub UI.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/snowflake/overview":{"id":"docs/quick-ingestion-guides/snowflake/overview","title":"Overview","description":"What You Will Get Out of This Guide","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/snowflake/setup":{"id":"docs/quick-ingestion-guides/snowflake/setup","title":"Setup","description":"In order to configure ingestion from Snowflake, you\'ll first have to ensure you have a Snowflake user with the ACCOUNTADMIN role or MANAGE GRANTS privilege.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/tableau/configuration":{"id":"docs/quick-ingestion-guides/tableau/configuration","title":"Configuration","description":"Now that you have created a DataHub-specific user with the relevant access in Tableau in the prior step, it\'s now time to set up a connection via the DataHub UI.","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/tableau/overview":{"id":"docs/quick-ingestion-guides/tableau/overview","title":"Overview","description":"What You Will Get Out of This Guide","sidebar":"overviewSidebar"},"docs/quick-ingestion-guides/tableau/setup":{"id":"docs/quick-ingestion-guides/tableau/setup","title":"Setup","description":"In order to configure ingestion from Tableau, you\'ll first have to enable Tableau Metadata API and you should have a user with Site Administrator Explorer permissions.","sidebar":"overviewSidebar"},"docs/quickstart":{"id":"docs/quickstart","title":"DataHub Quickstart Guide","description":"This guide provides instructions on deploying the open source DataHub locally.","sidebar":"overviewSidebar"},"docs/rfc":{"id":"docs/rfc","title":"DataHub RFC Process","description":"What is an RFC?","sidebar":"overviewSidebar"},"docs/roadmap":{"id":"docs/roadmap","title":"DataHub Roadmap","description":"The DataHub Roadmap has a new home!"},"docs/schema-history":{"id":"docs/schema-history","title":"About DataHub Schema History","description":"Schema History is a valuable tool for understanding how a Dataset changes over time and gives insight into the following cases,","sidebar":"overviewSidebar"},"docs/slack":{"id":"docs/slack","title":"Slack","description":"The DataHub Slack is a thriving and rapidly growing community - we can\'t wait for you to join us!","sidebar":"overviewSidebar"},"docs/sync-status":{"id":"docs/sync-status","title":"About DataHub Sync Status","description":"When looking at metadata in DataHub, it\'s useful to know if the information you\'re looking at is relevant.","sidebar":"overviewSidebar"},"docs/tags":{"id":"docs/tags","title":"About DataHub Tags","description":"Tags are informal, loosely controlled labels that help in search &amp; discovery. They can be added to datasets, dataset schemas, or containers, for an easy way to label or categorize entities \u2013 without having to associate them to a broader business glossary or vocabulary.","sidebar":"overviewSidebar"},"docs/tests/metadata-tests":{"id":"docs/tests/metadata-tests","title":"Metadata Tests","description":"DataHub includes a highly configurable, no-code framework that allows you to configure broad-spanning monitors &amp; continuous actions","sidebar":"overviewSidebar"},"docs/townhall-history":{"id":"docs/townhall-history","title":"Town Hall History","description":"For the Town Hall meetings after June 2023, please refer to our LinkedIn Live event history."},"docs/townhalls":{"id":"docs/townhalls","title":"DataHub Town Halls","description":"We hold regular virtual town hall meetings to meet with DataHub community.","sidebar":"overviewSidebar"},"docs/troubleshooting/build":{"id":"docs/troubleshooting/build","title":"Build Debugging Guide","description":"For when Local Development did not work out smoothly.","sidebar":"overviewSidebar"},"docs/troubleshooting/general":{"id":"docs/troubleshooting/general","title":"General Debugging Guide","description":"Logo for my platform is not appearing on the Home Page or search results","sidebar":"overviewSidebar"},"docs/troubleshooting/quickstart":{"id":"docs/troubleshooting/quickstart","title":"Quickstart Debugging Guide","description":"For when Quickstart did not work out smoothly.","sidebar":"overviewSidebar"},"docs/ui-ingestion":{"id":"docs/ui-ingestion","title":"Ingestion","description":"Introduction","sidebar":"overviewSidebar"},"docs/what-is-datahub/datahub-concepts":{"id":"docs/what-is-datahub/datahub-concepts","title":"DataHub Concepts","description":"Explore key concepts of DataHub to take full advantage of its capabilities in managing your data.","sidebar":"overviewSidebar"},"docs/what/aspect":{"id":"docs/what/aspect","title":"What is a metadata aspect?","description":"A metadata aspect is a structured document, or more precisely a record in PDL,"},"docs/what/delta":{"id":"docs/what/delta","title":"What is a metadata delta?","description":"Rest.li supports partial update natively without needing explicitly defined models."},"docs/what/entity":{"id":"docs/what/entity","title":"Entities","description":"This page has been moved. Please refer to The Metadata Model for details on"},"docs/what/gma":{"id":"docs/what/gma","title":"What is Generalized Metadata Architecture (GMA)?","description":"GMA is the backend infrastructure for DataHub. Unlike existing architectures, GMA leverages multiple storage technologies to efficiently service the four most commonly used query patterns"},"docs/what/gms":{"id":"docs/what/gms","title":"What is Generalized Metadata Service (GMS)?","description":"Metadata for entities onboarded to GMA is served through microservices known as Generalized Metadata Service (GMS). GMS typically provides a Rest.li API and must access the metadata using GMA DAOs."},"docs/what/graph":{"id":"docs/what/graph","title":"What is GMA graph?","description":"All the entities and relationships are stored in a graph database, Neo4j."},"docs/what/mxe":{"id":"docs/what/mxe","title":"Metadata Events","description":"DataHub makes use a few important Kafka events for operation. The most notable of these include","sidebar":"overviewSidebar"},"docs/what/relationship":{"id":"docs/what/relationship","title":"What is a relationship?","description":"A relationship is a named associate between exactly two entities, a source and a destination."},"docs/what/search-document":{"id":"docs/what/search-document","title":"What is a search document?","description":"Search documents are also modeled using PDL explicitly."},"docs/what/search-index":{"id":"docs/what/search-index","title":"What is GMA search index?","description":"Each search document type (or entity type) will be mapped to an independent search index in Elasticsearch."},"docs/what/snapshot":{"id":"docs/what/snapshot","title":"What is a snapshot?","description":"A metadata snapshot models the current state of one or multiple metadata aspects associated with a particular entity."},"docs/what/urn":{"id":"docs/what/urn","title":"What is URN?","description":"URN (Uniform Resource Name) is the chosen scheme of URI to uniquely define any resource in DataHub. It has the following form"},"graphql/enums":{"id":"graphql/enums","title":"Enums","description":"AccessLevel","sidebar":"overviewSidebar"},"graphql/inputObjects":{"id":"graphql/inputObjects","title":"Input objects","description":"AcceptRoleInput","sidebar":"overviewSidebar"},"graphql/interfaces":{"id":"graphql/interfaces","title":"Interfaces","description":"Aspect","sidebar":"overviewSidebar"},"graphql/mutations":{"id":"graphql/mutations","title":"Mutations","description":"acceptRole","sidebar":"overviewSidebar"},"graphql/objects":{"id":"graphql/objects","title":"Objects","description":"Access","sidebar":"overviewSidebar"},"graphql/queries":{"id":"graphql/queries","title":"Queries","description":"aggregateAcrossEntities","sidebar":"overviewSidebar"},"graphql/scalars":{"id":"graphql/scalars","title":"Scalars","description":"Boolean","sidebar":"overviewSidebar"},"graphql/unions":{"id":"graphql/unions","title":"Unions","description":"AnalyticsChart","sidebar":"overviewSidebar"},"metadata-ingestion-modules/airflow-plugin/README":{"id":"metadata-ingestion-modules/airflow-plugin/README","title":"Datahub Airflow Plugin","description":"See the DataHub Airflow docs for details."},"metadata-ingestion/adding-source":{"id":"metadata-ingestion/adding-source","title":"Adding a Metadata Ingestion Source","description":"There are two ways of adding a metadata ingestion source.","sidebar":"overviewSidebar"},"metadata-ingestion/as-a-library":{"id":"metadata-ingestion/as-a-library","title":"Python Emitter","description":"In some cases, you might want to construct Metadata events directly and use programmatic ways to emit that metadata to DataHub. Use-cases are typically push-based and include emitting metadata events from CI/CD pipelines, custom orchestrators etc.","sidebar":"overviewSidebar"},"metadata-ingestion/cli-ingestion":{"id":"metadata-ingestion/cli-ingestion","title":"CLI Ingestion","description":"Installing the CLI","sidebar":"overviewSidebar"},"metadata-ingestion/developing":{"id":"metadata-ingestion/developing","title":"Developing on Metadata Ingestion","description":"If you just want to use metadata ingestion, check the user-centric guide.","sidebar":"overviewSidebar"},"metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source":{"id":"metadata-ingestion/docs/dev_guides/add_stateful_ingestion_to_source","title":"Adding Stateful Ingestion to a Source","description":"Currently, datahub supports the Stale Metadata Removal and","sidebar":"overviewSidebar"},"metadata-ingestion/docs/dev_guides/classification":{"id":"metadata-ingestion/docs/dev_guides/classification","title":"Classification","description":"The classification feature enables sources to be configured to automatically predict info types for columns and use them as glossary terms. This is an explicit opt-in feature and is not enabled by default.","sidebar":"overviewSidebar"},"metadata-ingestion/docs/dev_guides/profiling_ingestions":{"id":"metadata-ingestion/docs/dev_guides/profiling_ingestions","title":"Profiling ingestions","description":"\ud83e\udd1d Version compatibility","sidebar":"overviewSidebar"},"metadata-ingestion/docs/dev_guides/reporting_telemetry":{"id":"metadata-ingestion/docs/dev_guides/reporting_telemetry","title":"Datahub\'s Reporting Framework for Ingestion Job Telemetry","description":"The Datahub\'s reporting framework allows for configuring reporting providers with the ingestion pipelines to send","sidebar":"overviewSidebar"},"metadata-ingestion/docs/dev_guides/sql_profiles":{"id":"metadata-ingestion/docs/dev_guides/sql_profiles","title":"SQL Profiling","description":"SQL Profiling collects table level and column level statistics.","sidebar":"overviewSidebar"},"metadata-ingestion/docs/dev_guides/stateful":{"id":"metadata-ingestion/docs/dev_guides/stateful","title":"Stateful Ingestion","description":"The stateful ingestion feature enables sources to be configured to save custom checkpoint states from their","sidebar":"overviewSidebar"},"metadata-ingestion/docs/transformer/dataset_transformer":{"id":"metadata-ingestion/docs/transformer/dataset_transformer","title":"Dataset","description":"The below table shows transformer which can transform aspects of entity Dataset.","sidebar":"overviewSidebar"},"metadata-ingestion/docs/transformer/intro":{"id":"metadata-ingestion/docs/transformer/intro","title":"Introduction","description":"What\u2019s a transformer?","sidebar":"overviewSidebar"},"metadata-ingestion/examples/transforms/README":{"id":"metadata-ingestion/examples/transforms/README","title":"Custom transformer script","description":"This script sets up a transformer that reads in a list of owner URNs from a JSON file specified via owners_json and appends these owners to every MCE."},"metadata-ingestion/integration_docs/great-expectations":{"id":"metadata-ingestion/integration_docs/great-expectations","title":"Great Expectations","description":"This guide helps to setup and configure DataHubValidationAction in Great Expectations to send assertions(expectations) and their results to DataHub using DataHub\'s Python Rest emitter.","sidebar":"overviewSidebar"},"metadata-ingestion/README":{"id":"metadata-ingestion/README","title":"Introduction to Metadata Ingestion","description":"Please see our Integrations page to browse our ingestion sources and filter on their features.","sidebar":"overviewSidebar"},"metadata-ingestion/recipe_overview":{"id":"metadata-ingestion/recipe_overview","title":"Recipes","description":"A recipe is the main configuration file for metadata ingestion. It tells our ingestion scripts where to pull data from (source) and where to put it (sink).","sidebar":"overviewSidebar"},"metadata-ingestion/schedule_docs/airflow":{"id":"metadata-ingestion/schedule_docs/airflow","title":"Using Airflow","description":"If you are using Apache Airflow for your scheduling then you might want to also use it for scheduling your ingestion recipes. For any Airflow specific questions you can go through Airflow docs for more details.","sidebar":"overviewSidebar"},"metadata-ingestion/schedule_docs/cron":{"id":"metadata-ingestion/schedule_docs/cron","title":"Using Cron","description":"Assume you have a recipe file /home/ubuntu/datahubingest/mysqlto_datahub.yml on your machine","sidebar":"overviewSidebar"},"metadata-ingestion/schedule_docs/datahub":{"id":"metadata-ingestion/schedule_docs/datahub","title":"Using DataHub","description":"UI Ingestion can be used to schedule metadata ingestion through DataHub."},"metadata-ingestion/schedule_docs/intro":{"id":"metadata-ingestion/schedule_docs/intro","title":"Introduction to Scheduling Metadata Ingestion","description":"Given a recipe file /home/ubuntu/datahubingest/mysqlto_datahub.yml.","sidebar":"overviewSidebar"},"metadata-ingestion/schedule_docs/kubernetes":{"id":"metadata-ingestion/schedule_docs/kubernetes","title":"Using Kubernetes","description":"If you have deployed DataHub using our official helm charts you can use the","sidebar":"overviewSidebar"},"metadata-ingestion/sink_docs/console":{"id":"metadata-ingestion/sink_docs/console","title":"Console","description":"For context on getting started with ingestion, check out our metadata ingestion guide.","sidebar":"overviewSidebar"},"metadata-ingestion/sink_docs/datahub":{"id":"metadata-ingestion/sink_docs/datahub","title":"DataHub","description":"DataHub Rest","sidebar":"overviewSidebar"},"metadata-ingestion/sink_docs/file":{"id":"metadata-ingestion/sink_docs/file","title":"File","description":"For context on getting started with ingestion, check out our metadata ingestion guide.","sidebar":"overviewSidebar"},"metadata-ingestion/sink_overview":{"id":"metadata-ingestion/sink_overview","title":"Sinks","description":"Sinks are destinations for metadata.","sidebar":"overviewSidebar"},"metadata-ingestion/source_overview":{"id":"metadata-ingestion/source_overview","title":"Sources","description":"Sources are the data systems that we are extracting metadata from.","sidebar":"overviewSidebar"},"metadata-ingestion/source-docs-template":{"id":"metadata-ingestion/source-docs-template","title":"Source Name","description":"Certified"},"metadata-integration/java/as-a-library":{"id":"metadata-integration/java/as-a-library","title":"Java Emitter","description":"In some cases, you might want to construct Metadata events directly and use programmatic ways to emit that metadata to DataHub. Use-cases are typically push-based and include emitting metadata events from CI/CD pipelines, custom orchestrators etc.","sidebar":"overviewSidebar"},"metadata-integration/java/datahub-protobuf/README":{"id":"metadata-integration/java/datahub-protobuf/README","title":"Protobuf Schemas","description":"The datahub-protobuf module is designed to be used with the Java Emitter, the input is a compiled protobuf binary .protoc files and optionally the corresponding .proto source code. You can supply a file with multiple nested messages to be processed. If you have a file with multiple non-nested messages, you will need to separate them out into different files or supply the root message, as otherwise we will only process the first one.","sidebar":"overviewSidebar"},"metadata-integration/java/spark-lineage/README":{"id":"metadata-integration/java/spark-lineage/README","title":"Spark","description":"To integrate Spark with DataHub, we provide a lightweight Java agent that listens for Spark application and job events and pushes metadata out to DataHub in real-time. The agent listens to events such application start/end, and SQLExecution start/end to create pipelines (i.e. DataJob) and tasks (i.e. DataFlow) in Datahub along with lineage to datasets that are being read from and written to. Read on to learn how to configure this for different Spark scenarios.","sidebar":"overviewSidebar"},"metadata-jobs/mae-consumer-job/README":{"id":"metadata-jobs/mae-consumer-job/README","title":"metadata-jobs:mae-consumer-job","description":"The Metadata Audit Event Consumer is a Spring job which can be deployed by itself, or as part of the Metadata Service.","sidebar":"overviewSidebar"},"metadata-jobs/mce-consumer-job/README":{"id":"metadata-jobs/mce-consumer-job/README","title":"metadata-jobs:mce-consumer-job","description":"The Metadata Change Event Consumer is a Spring job which can be deployed by itself, or as part of the Metadata Service.","sidebar":"overviewSidebar"},"metadata-jobs/README":{"id":"metadata-jobs/README","title":"MXE Processing Jobs","description":"DataHub uses Kafka as the pub-sub message queue in the backend. There are 2 Kafka topics used by DataHub which are"},"metadata-models-custom/README":{"id":"metadata-models-custom/README","title":"A Custom Metadata Model","description":"This module hosts a gradle project where you can store your custom metadata model. It contains an example extension for you to follow."},"metadata-service/README":{"id":"metadata-service/README","title":"metadata-service","description":"DataHub Metadata Service is a service written in Java consisting of multiple servlets:","sidebar":"overviewSidebar"},"metadata-service/services/README":{"id":"metadata-service/services/README","title":"Service Layer","description":"Module to abstract away business logic from implementation specific libraries to make them lighter weight from a"},"perf-test/README":{"id":"perf-test/README","title":"Load testing with Locust","description":"Locust is an open-source, python-based, easy-to-use load testing tool. It provides an interface to"},"python-sdk/builder":{"id":"python-sdk/builder","title":"Builder","description":"\\\\n\\\\n\\\\nThese classes and methods make it easier to construct MetadataChangeProposals and MetadataChangeEvents.\\\\n\\\\n\\\\nclass datahub.emitter.mcp.MetadataChangeProposalWrapper(entityType=\'ENTITYTYPEUNSET\', changeType=\'UPSERT\', entityUrn=None, entityKeyAspect=None, auditHeader=None, aspectName=None, aspect=None, systemMetadata=None)\\\\nBases\\\\n\\\\nentityType (str)\\\\nchangeType (Union[str, ChangeTypeClass])\\\\nentityUrn (Optional[str])\\\\nentityKeyAspect (Optional[Aspect])\\\\nauditHeader (Optional[KafkaAuditHeaderClass])\\\\naspectName (Optional[str])\\\\naspect (Optional[Aspect])\\\\nsystemMetadata (Optional[SystemMetadataClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nentityType Union[str, ChangeTypeClass] = \'UPSERT\'\\\\n\\\\n\\\\n\\\\nentityUrn Optional[Aspect] = None\\\\n\\\\n\\\\n\\\\nauditHeader Optional[str] = None\\\\n\\\\n\\\\n\\\\naspect Optional[SystemMetadataClass] = None\\\\n\\\\n\\\\n\\\\nclassmethod constructmany(entityUrn, aspects)\\\\n\\\\nParameters\\\\nList[MetadataChangeProposalWrapper]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nmakemcp()\\\\n\\\\nReturn type\\\\nbool\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ntoobj(tuples=False, simplifiedstructure=False)\\\\n\\\\nParameters\\\\ndict\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromobj(obj, tuples=False)\\\\nAttempt to deserialize into an MCPW, but fall back\\\\nto a standard MCP if we\\\\u2019re missing codegen\\\\u2019d classes for the\\\\nentity key or aspect.\\\\n\\\\nParameters\\\\nUnion[MetadataChangeProposalWrapper, MetadataChangeProposalClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod tryfrommcpc(mcpc)\\\\nAttempts to create a MetadataChangeProposalWrapper from a MetadataChangeProposalClass.\\\\nNeatly handles unsupported, expected cases, such as unknown aspect types or non-json content type.\\\\n\\\\nRaises\\\\nmcpc (MetadataChangeProposalClass)\\\\n\\\\nReturn type\\\\nmcl (MetadataChangeLogClass)\\\\n\\\\nReturn type\\\\n\\\\nobj (dict)\\\\ntuples (bool)\\\\n\\\\n\\\\nReturn type\\\\n\\\\ntreaterrorsaswarnings (bool)\\\\nisprimarysource (bool)\\\\n\\\\n\\\\nReturn type\\\\nvalue (bool)\\\\n\\\\nReturn type Enum\\\\nAn enumeration.\\\\n\\\\n\\\\nUSER = \'corpuser\'\\\\n\\\\n\\\\n\\\\nGROUP = \'corpGroup\'\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.getsystime()\\\\n\\\\nReturn type\\\\nplatform (str)\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\nname (str)\\\\nenv (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\ninstance (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\nname (str)\\\\nplatforminstance (Optional[str])\\\\nenv (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nparenturn (str)\\\\nfieldpath (str)\\\\n\\\\n\\\\nReturn type\\\\nschemafieldurn (str)\\\\n\\\\nReturn type\\\\ndataseturn (str)\\\\n\\\\nReturn type\\\\nkey (DatasetKeyClass)\\\\n\\\\nReturn type\\\\nguid (Union[str, DatahubKey])\\\\n\\\\nReturn type\\\\nguid (str)\\\\n\\\\nReturn type\\\\nobj (dict)\\\\n\\\\nReturn type\\\\nassertionid (str)\\\\n\\\\nReturn type\\\\nassertionurn (str)\\\\n\\\\nReturn type\\\\nusername (str)\\\\n\\\\nReturn type\\\\ngroupname (str)\\\\n\\\\nReturn type\\\\ntag (str)\\\\n\\\\nReturn type\\\\n\\\\nowner (str)\\\\nownertype (OwnerType)\\\\n\\\\n\\\\nReturn type\\\\nterm (str)\\\\n\\\\nReturn type\\\\n\\\\norchestrator (str)\\\\nflowid (str)\\\\ncluster (str)\\\\nplatforminstance (Optional[str])\\\\n\\\\n\\\\nReturn type\\\\n\\\\nflowurn (str)\\\\njobid (str)\\\\n\\\\n\\\\nReturn type\\\\ndataProcessInstanceId (str)\\\\n\\\\nReturn type\\\\n\\\\norchestrator (str)\\\\nflowid (str)\\\\njobid (str)\\\\ncluster (str)\\\\nplatforminstance (Optional[str])\\\\n\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\nname (str)\\\\nplatforminstance (Optional[str])\\\\n\\\\n\\\\nReturn type\\\\ndashboardurn (str)\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\nname (str)\\\\nplatforminstance (Optional[str])\\\\n\\\\n\\\\nReturn type\\\\ncharturn (str)\\\\n\\\\nReturn type\\\\ndomain (str)\\\\n\\\\nReturn type\\\\n\\\\nfeaturetablename (str)\\\\nprimarykeyname (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nfeaturetablename (str)\\\\nfeaturename (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\nfeaturetablename (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\nmodelname (str)\\\\nenv (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\ndeploymentname (str)\\\\nenv (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nplatform (str)\\\\ngroupname (str)\\\\nenv (str)\\\\n\\\\n\\\\nReturn type\\\\nownershiptype (Optional[str])\\\\n\\\\nReturn type\\\\nownershiptype (Optional[str])\\\\n\\\\nReturn type this function only supports lineage for dataset aspects. It will not\\\\nupdate lineage for any other aspect types.\\\\n\\\\nParameters\\\\nMetadataChangeEventClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.canaddaspect(mce, AspectType)\\\\n\\\\nParameters\\\\nbool\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.assertcanaddaspect(mce, AspectType)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.getaspectifavailable(mce, AspectType)\\\\n\\\\nParameters\\\\nOptional[TypeVar(Aspect, bound= Aspect)]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.removeaspectifavailable(mce, aspecttype)\\\\n\\\\nParameters\\\\nbool\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.getoraddaspect(mce, default)\\\\n\\\\nParameters\\\\nTypeVar(Aspect, bound= Aspect)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.makeglobaltagaspectwithtaglist(tags)\\\\n\\\\nParameters\\\\nGlobalTagsClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.makeownershipaspectfromurnlist(ownerurns, sourcetype, ownertype=\'DATAOWNER\')\\\\n\\\\nParameters\\\\nOwnershipClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.makeglossarytermsaspectfromurnlist(termurns)\\\\n\\\\nParameters\\\\nGlossaryTermsClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcebuilder.setaspect(mce, aspect, aspecttype)\\\\nSets the aspect to the provided aspect, overwriting any previous aspect value that might have existed before.\\\\nIf passed in aspect is None, then the existing aspect value will be removed\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcpbuilder.DatahubKey(data)\\\\nBases\\\\ndata (Any)\\\\n\\\\n\\\\n\\\\n\\\\nguid_dict()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcp_builder.ContainerKey(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nplatform (str)\\\\ninstance (str | None)\\\\nenv (str | None)\\\\nbackcompatenvasinstance (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nplatform Optional[str]\\\\n\\\\n\\\\n\\\\nenv bool\\\\n\\\\n\\\\n\\\\nguiddict()\\\\n\\\\nReturn type\\\\nDict[str, str]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nasurn()\\\\n\\\\nReturn type ContainerKey\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcpbuilder.SchemaKey(**data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nplatform (str)\\\\ninstance (str | None)\\\\nenv (str | None)\\\\nbackcompatenvasinstance (bool)\\\\ndatabase (str)\\\\nschema (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndbschema ContainerKey\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcp_builder.MetastoreKey(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nplatform (str)\\\\ninstance (str | None)\\\\nenv (str | None)\\\\nbackcompatenvasinstance (bool)\\\\nmetastore (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nmetastore MetastoreKey\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcp_builder.UnitySchemaKeyWithMetastore(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nplatform (str)\\\\ninstance (str | None)\\\\nenv (str | None)\\\\nbackcompatenvasinstance (bool)\\\\nmetastore (str)\\\\ncatalog (str)\\\\nunityschema (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nunityschema ContainerKey\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcp_builder.UnitySchemaKey(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nplatform (str)\\\\ninstance (str | None)\\\\nenv (str | None)\\\\nbackcompatenvasinstance (bool)\\\\ncatalog (str)\\\\nunityschema (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nunityschema ProjectIdKey\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcp_builder.FolderKey(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nplatform (str)\\\\ninstance (str | None)\\\\nenv (str | None)\\\\nbackcompatenvasinstance (bool)\\\\nfolderabspath (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nfolderabspath ContainerKey\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.mcp_builder.NotebookKey(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nnotebookid (int)\\\\nplatform (str)\\\\ninstance (str | None)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nnotebookid str\\\\n\\\\n\\\\n\\\\ninstance\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.adddomaintoentitywu(entityurn, domainurn)\\\\n\\\\nParameters\\\\nIterable[MetadataWorkUnit]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.addownertoentitywu(entitytype, entityurn, ownerurn)\\\\n\\\\nParameters\\\\nIterable[MetadataWorkUnit]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.addtagstoentitywu(entitytype, entityurn, tags)\\\\n\\\\nParameters\\\\nIterable[MetadataWorkUnit]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.gencontainers(containerkey, name, subtypes, parentcontainerkey=None, extraproperties=None, domainurn=None, description=None, ownerurn=None, externalurl=None, tags=None, qualifiedname=None, created=None, lastmodified=None)\\\\n\\\\nParameters\\\\nIterable[MetadataWorkUnit]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.adddatasettocontainer(containerkey, dataseturn)\\\\n\\\\nParameters\\\\nIterable[MetadataWorkUnit]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.addentitytocontainer(containerkey, entitytype, entityurn)\\\\n\\\\nParameters\\\\nIterable[MetadataWorkUnit]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.mcpsfrommce(mce)\\\\n\\\\nParameters\\\\nIterable[MetadataChangeProposalWrapper]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.createembedmcp(urn, embedurl)\\\\n\\\\nParameters\\\\nMetadataChangeProposalWrapper\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.mcpbuilder.entitysupportsaspect(entitytype, aspecttype)\\\\n\\\\nParameters\\\\nbool\\\\n\\\\n\\\\n\\\\n\\\\n\\"}}&gt;","sidebar":"overviewSidebar"},"python-sdk/clients":{"id":"python-sdk/clients","title":"Client","description":"\\\\n\\\\n\\\\nThe Kafka emitter or Rest emitter can be used to push metadata to DataHub.\\\\nThe DataHub graph client extends the Rest emitter with additional functionality.\\\\n\\\\n\\\\nclass datahub.emitter.restemitter.DataHubRestEmitter(gmsserver, token=None, timeoutsec=None, connecttimeoutsec=None, readtimeoutsec=None, retrystatuscodes=None, retrymethods=None, retrymaxtimes=None, extraheaders=None, cacertificatepath=None, clientcertificatepath=None, disablesslverification=False)\\\\nBases\\\\n\\\\ngmsserver (str)\\\\ntoken (Optional[str])\\\\ntimeoutsec (Optional[float])\\\\nconnecttimeoutsec (Optional[float])\\\\nreadtimeoutsec (Optional[float])\\\\nretrystatuscodes (Optional[List[int]])\\\\nretrymethods (Optional[List[str]])\\\\nretrymaxtimes (Optional[int])\\\\nextraheaders (Optional[Dict[str, str]])\\\\ncacertificatepath (Optional[str])\\\\nclientcertificatepath (Optional[str])\\\\ndisablesslverification (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ntestconnection()\\\\n\\\\nReturn type\\\\nDataHubGraph\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nemit(item, callback=None)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nemitmce(mce)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nemitmcp(mcp)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nemitusage(usageStats)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nflush()\\\\n\\\\nReturn type\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndatahub.emitter.restemitter.DatahubRestEmitter\\\\nalias of DataHubRestEmitter\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.kafkaemitter.KafkaEmitterConfig(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nconnection (KafkaProducerConnectionConfig)\\\\ntopic_routes (Dict[str, str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nconnection Dict[str, str]\\\\n\\\\n\\\\n\\\\nclassmethod validate_topic_routes(v)\\\\n\\\\nParameters\\\\nDict[str, str]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.emitter.kafka_emitter.DatahubKafkaEmitter(config)\\\\nBases\\\\nconfig (KafkaEmitterConfig)\\\\n\\\\n\\\\n\\\\n\\\\nemit(item, callback=None)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nemit_mce_async(mce, callback)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nemit_mcp_async(mcp, callback)\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nflush()\\\\n\\\\nReturn type\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.ingestion.graph.client.DatahubClientConfig(data)\\\\nBases\\\\n\\\\ndata (Any)\\\\nserver (str)\\\\ntoken (str | None)\\\\ntimeoutsec (int | None)\\\\nretrystatuscodes (List[int] | None)\\\\nretrymaxtimes (int | None)\\\\nextraheaders (Dict[str, str] | None)\\\\ncacertificatepath (str | None)\\\\nclientcertificatepath (str | None)\\\\ndisablesslverification (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nserver Optional[str]\\\\n\\\\n\\\\n\\\\ntimeoutsec Optional[List[int]]\\\\n\\\\n\\\\n\\\\nretrymaxtimes Optional[Dict[str, str]]\\\\n\\\\n\\\\n\\\\ncacertificatepath Optional[str]\\\\n\\\\n\\\\n\\\\ndisablesslverification object\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\nrelationshiptype DataHubRestEmitter\\\\n\\\\nParameters\\\\nemitter (DataHubRestEmitter)\\\\n\\\\nReturn type\\\\n\\\\nentityurn (str) \\\\u2013 The urn of the entity\\\\naspecttype (Type[TypeVar(Aspect, bound= Aspect)]) \\\\u2013 The type class of the aspect being requested (e.g. datahub.metadata.schemaclasses.DatasetProperties)\\\\nversion (int) \\\\u2013 The version of the aspect to retrieve. The default of 0 means latest. Versions &amp;gt; 0 go from oldest to newest, so 1 is the oldest.\\\\n\\\\n\\\\nReturn type\\\\nthe Aspect as a dictionary if present, None if no aspect was found (HTTP status 404)\\\\n\\\\nRaises\\\\n\\\\nentityurn (str)\\\\naspecttype (Type[TypeVar(Aspect, bound= Aspect)])\\\\naspect (str)\\\\naspecttypename (Optional[str])\\\\nversion (int)\\\\n\\\\n\\\\nReturn type\\\\nDict[str, Any]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetownership(entityurn)\\\\n\\\\nParameters\\\\nOptional[OwnershipClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetschemametadata(entityurn)\\\\n\\\\nParameters\\\\nOptional[SchemaMetadataClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetdomainproperties(entityurn)\\\\n\\\\nParameters\\\\nOptional[DomainPropertiesClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetdatasetproperties(entityurn)\\\\n\\\\nParameters\\\\nOptional[DatasetPropertiesClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngettags(entityurn)\\\\n\\\\nParameters\\\\nOptional[GlobalTagsClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetglossaryterms(entityurn)\\\\n\\\\nParameters\\\\nOptional[GlossaryTermsClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetdomain(entityurn)\\\\n\\\\nParameters\\\\nOptional[DomainsClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetbrowsepath(entityurn)\\\\n\\\\nParameters\\\\nOptional[BrowsePathsClass]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetusageaspectsfromurn(entityurn, starttimestamp, endtimestamp)\\\\n\\\\nParameters\\\\nOptional[List[DatasetUsageStatisticsClass]]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nlistallentityurns(entitytype, start, count)\\\\n\\\\nParameters\\\\nOptional[List[str]]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetlatesttimeseriesvalue(entityurn, aspecttype, filtercriteriamap)\\\\n\\\\nParameters\\\\nOptional[TypeVar(Aspect, bound= Aspect)]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetentityraw(entityurn, aspects=None)\\\\n\\\\nParameters\\\\nDict\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetaspectsforentity(entityurn, aspects, aspecttypes)\\\\nGet multiple aspects for an entity.\\\\nDeprecated in favor of getaspect (single aspect) or getentitysemityped (full\\\\nentity without manually specifying a list of aspects).\\\\nWarning\\\\n\\\\nentityurn (str) \\\\u2013 The urn of the entity\\\\naspecttypelist (List[Type[Aspect]]) \\\\u2013 List of aspect type classes being requested (e.g. [datahub.metadata.schemaclasses.DatasetProperties])\\\\naspectslist (List[str]) \\\\u2013 List of aspect names being requested (e.g. [schemaMetadata, datasetProperties])\\\\nentityurn\\\\naspects (List[str])\\\\naspecttypes (List[Type[TypeVar(Aspect, bound= Aspect)]])\\\\n\\\\n\\\\nReturn type\\\\nOptionally, a map of aspectname to aspectvalue as a dictionary if present, aspectvalue will be set to None if that aspect was not found. Returns None on HTTP status 404.\\\\n\\\\nRaises Do not use this method to determine if an entity exists! This method will always return\\\\nsomething, even if the entity doesn\\\\u2019t actually exist in DataHub.\\\\n\\\\nParameters\\\\nAspectBag\\\\n\\\\nReturns\\\\ndomainname (str)\\\\n\\\\nReturn type\\\\n\\\\nenv (Optional[str])\\\\nsearchquery (str)\\\\n\\\\n\\\\nReturn type\\\\n\\\\nentitytypes (Optional[List[str]]) \\\\u2013 List of entity types to include. If None, all entity types will be returned.\\\\nplatform (Optional[str]) \\\\u2013 Platform to filter on. If None, all platforms will be returned.\\\\nplatforminstance (Optional[str]) \\\\u2013 Platform instance to filter on. If None, all platform instances will be returned.\\\\nenv (Optional[str]) \\\\u2013 Environment (e.g. PROD, DEV) to filter on. If None, all environments will be returned.\\\\nquery (Optional[str]) \\\\u2013 Query string to filter on. If None, all entities will be returned.\\\\ncontainer (Optional[str]) \\\\u2013 A container urn that entities must be within.\\\\nThis works recursively, so it will include entities within sub-containers as well.\\\\nIf None, all entities will be returned.\\\\nNote that this requires browsePathV2 aspects (added in 0.10.4+).\\\\nstatus (RemovedStatusFilter) \\\\u2013 Filter on the deletion status of the entity. The default is only return non-soft-deleted entities.\\\\nextraFilters (Optional[List[Dict[str, Any]]]) \\\\u2013 Additional filters to apply. If specified, the results will match all of the filters.\\\\nbatchsize (int)\\\\n\\\\n\\\\nReturn type\\\\nAn iterable of urns that match the filters.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetlatestpipelinecheckpoint(pipelinename, platform)\\\\n\\\\nParameters\\\\nOptional[Checkpoint[GenericCheckpointState]]\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetsearchresults(start=0, count=1, entity=\'dataset\')\\\\n\\\\nParameters\\\\nDict\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetaspectcounts(aspect, urnlike=None)\\\\n\\\\nParameters\\\\nint\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nexecutegraphql(query, variables=None)\\\\n\\\\nParameters\\\\nDict\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass RelationshipDirection(value)\\\\nBases\\\\n\\\\nentityurn (str)\\\\nrelationshiptypes (List[str])\\\\ndirection (RelationshipDirection)\\\\n\\\\n\\\\nReturn type\\\\nentityurn (str)\\\\n\\\\nReturn type\\\\n\\\\nurn (str) \\\\u2013 The urn of the entity to soft-delete.\\\\nrunid (str)\\\\ndeletiontimestamp (Optional[int])\\\\n\\\\n\\\\nReturn type\\\\nurn (str) \\\\u2013 The urn of the entity to hard delete.\\\\n\\\\nReturn type\\\\nA tuple of (rowsaffected, timeseriesrowsaffected).\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ndeleteentity(urn, hard=False)\\\\nDelete an entity by urn.\\\\n\\\\nParameters\\\\nNone\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nharddeletetimeseriesaspect(urn, aspectname, starttime, endtime)\\\\nHard delete timeseries aspects of an entity.\\\\n\\\\nParameters\\\\nint\\\\n\\\\nReturns\\\\n\\\\nurn (str) \\\\u2013 The urn of the entity to delete references to.\\\\ndryrun (bool) \\\\u2013 If True, do not actually delete the references, just return the count of\\\\nreferences and the list of related aspects.\\\\n\\\\n\\\\nReturn type\\\\nA tuple of (referencecount, sample of relatedaspects).\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ninitializeschemaresolverfromdatahub(platform, platforminstance, env)\\\\n\\\\nParameters\\\\nSchemaResolver\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nparsesqllineage(sql, *, platform, platforminstance=None, env=\'PROD\', defaultdb=None, defaultschema=None)\\\\n\\\\nParameters\\\\nSqlParsingResult\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclose()\\\\n\\\\nReturn type\\\\nDataHubGraph\\\\n\\\\n\\\\n\\\\n\\\\n\\"}}&gt;","sidebar":"overviewSidebar"},"python-sdk/models":{"id":"python-sdk/models","title":"Models","description":"\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AccessClass(roles=None)\\\\nBases\\\\nroles (Optional[List[RoleAssociationClass]])\\\\n\\\\n\\\\n\\\\n\\\\nproperty roles object\\\\nThe various access levels\\\\n\\\\n\\\\nPRIVATE = \'PRIVATE\'\\\\n\\\\n\\\\n\\\\nPUBLIC = \'PUBLIC\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ActorsClass(users=None)\\\\nBases\\\\nusers (Optional[List[RoleUserClass]])\\\\n\\\\n\\\\n\\\\n\\\\nproperty users DictWrapper\\\\nArray field type.\\\\n\\\\nParameters None | List[str]\\\\nList of types this array holds.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AspectBag\\\\nBases AccessClass\\\\n\\\\n\\\\n\\\\nactors AssertionActionsClass\\\\n\\\\n\\\\n\\\\nassertionInfo AssertionKeyClass\\\\n\\\\n\\\\n\\\\nassertionRunEvent BrowsePathsClass\\\\n\\\\n\\\\n\\\\nbrowsePathsV2 ChartInfoClass\\\\n\\\\n\\\\n\\\\nchartKey ChartQueryClass\\\\n\\\\n\\\\n\\\\nchartUsageStatistics ContainerClass\\\\n\\\\n\\\\n\\\\ncontainerKey ContainerPropertiesClass\\\\n\\\\n\\\\n\\\\ncorpGroupEditableInfo CorpGroupInfoClass\\\\n\\\\n\\\\n\\\\ncorpGroupKey CorpUserCredentialsClass\\\\n\\\\n\\\\n\\\\ncorpUserEditableInfo CorpUserInfoClass\\\\n\\\\n\\\\n\\\\ncorpUserKey CorpUserSettingsClass\\\\n\\\\n\\\\n\\\\ncorpUserStatus CostClass\\\\n\\\\n\\\\n\\\\ndashboardInfo DashboardKeyClass\\\\n\\\\n\\\\n\\\\ndashboardUsageStatistics DataContractKeyClass\\\\n\\\\n\\\\n\\\\ndataContractProperties DataContractStatusClass\\\\n\\\\n\\\\n\\\\ndataFlowInfo DataFlowKeyClass\\\\n\\\\n\\\\n\\\\ndataHubAccessTokenInfo DataHubAccessTokenKeyClass\\\\n\\\\n\\\\n\\\\ndataHubExecutionRequestInput ExecutionRequestKeyClass\\\\n\\\\n\\\\n\\\\ndataHubExecutionRequestResult ExecutionRequestSignalClass\\\\n\\\\n\\\\n\\\\ndataHubIngestionSourceInfo DataHubIngestionSourceKeyClass\\\\n\\\\n\\\\n\\\\ndataHubPolicyInfo DataHubPolicyKeyClass\\\\n\\\\n\\\\n\\\\ndataHubRetentionConfig DataHubRetentionKeyClass\\\\n\\\\n\\\\n\\\\ndataHubRoleInfo DataHubRoleKeyClass\\\\n\\\\n\\\\n\\\\ndataHubSecretKey DataHubSecretValueClass\\\\n\\\\n\\\\n\\\\ndataHubStepStateKey DataHubStepStatePropertiesClass\\\\n\\\\n\\\\n\\\\ndataHubUpgradeKey DataHubUpgradeRequestClass\\\\n\\\\n\\\\n\\\\ndataHubUpgradeResult DataHubViewInfoClass\\\\n\\\\n\\\\n\\\\ndataHubViewKey DataJobInfoClass\\\\n\\\\n\\\\n\\\\ndataJobInputOutput DataJobKeyClass\\\\n\\\\n\\\\n\\\\ndataPlatformInfo DataPlatformInstanceClass\\\\n\\\\n\\\\n\\\\ndataPlatformInstanceKey DataPlatformInstancePropertiesClass\\\\n\\\\n\\\\n\\\\ndataPlatformKey DataProcessInfoClass\\\\n\\\\n\\\\n\\\\ndataProcessInstanceInput DataProcessInstanceKeyClass\\\\n\\\\n\\\\n\\\\ndataProcessInstanceOutput DataProcessInstancePropertiesClass\\\\n\\\\n\\\\n\\\\ndataProcessInstanceRelationships DataProcessInstanceRunEventClass\\\\n\\\\n\\\\n\\\\ndataProcessKey DataProductKeyClass\\\\n\\\\n\\\\n\\\\ndataProductProperties DatahubIngestionCheckpointClass\\\\n\\\\n\\\\n\\\\ndatahubIngestionRunSummary DatasetDeprecationClass\\\\n\\\\n\\\\n\\\\ndatasetKey DatasetProfileClass\\\\n\\\\n\\\\n\\\\ndatasetProperties DatasetUpstreamLineageClass\\\\n\\\\n\\\\n\\\\ndatasetUsageStatistics DeprecationClass\\\\n\\\\n\\\\n\\\\ndomainKey DomainPropertiesClass\\\\n\\\\n\\\\n\\\\ndomains EditableChartPropertiesClass\\\\n\\\\n\\\\n\\\\neditableContainerProperties EditableDashboardPropertiesClass\\\\n\\\\n\\\\n\\\\neditableDataFlowProperties EditableDataJobPropertiesClass\\\\n\\\\n\\\\n\\\\neditableDatasetProperties EditableMLFeaturePropertiesClass\\\\n\\\\n\\\\n\\\\neditableMlFeatureTableProperties EditableMLModelGroupPropertiesClass\\\\n\\\\n\\\\n\\\\neditableMlModelProperties EditableMLPrimaryKeyPropertiesClass\\\\n\\\\n\\\\n\\\\neditableNotebookProperties EditableSchemaMetadataClass\\\\n\\\\n\\\\n\\\\nembed GlobalSettingsInfoClass\\\\n\\\\n\\\\n\\\\nglobalSettingsKey GlobalTagsClass\\\\n\\\\n\\\\n\\\\nglossaryNodeInfo GlossaryNodeKeyClass\\\\n\\\\n\\\\n\\\\nglossaryRelatedTerms GlossaryTermInfoClass\\\\n\\\\n\\\\n\\\\nglossaryTermKey GlossaryTermsClass\\\\n\\\\n\\\\n\\\\ngroupMembership InputFieldsClass\\\\n\\\\n\\\\n\\\\ninstitutionalMemory IntendedUseClass\\\\n\\\\n\\\\n\\\\ninviteToken InviteTokenKeyClass\\\\n\\\\n\\\\n\\\\nmlFeatureKey MLFeaturePropertiesClass\\\\n\\\\n\\\\n\\\\nmlFeatureTableKey MLFeatureTablePropertiesClass\\\\n\\\\n\\\\n\\\\nmlHyperParam MLMetricClass\\\\n\\\\n\\\\n\\\\nmlModelCaveatsAndRecommendations MLModelDeploymentKeyClass\\\\n\\\\n\\\\n\\\\nmlModelDeploymentProperties EthicalConsiderationsClass\\\\n\\\\n\\\\n\\\\nmlModelEvaluationData MLModelFactorPromptsClass\\\\n\\\\n\\\\n\\\\nmlModelGroupKey MLModelGroupPropertiesClass\\\\n\\\\n\\\\n\\\\nmlModelKey MetricsClass\\\\n\\\\n\\\\n\\\\nmlModelProperties QuantitativeAnalysesClass\\\\n\\\\n\\\\n\\\\nmlModelTrainingData MLPrimaryKeyKeyClass\\\\n\\\\n\\\\n\\\\nmlPrimaryKeyProperties NativeGroupMembershipClass\\\\n\\\\n\\\\n\\\\nnotebookContent NotebookInfoClass\\\\n\\\\n\\\\n\\\\nnotebookKey OperationClass\\\\n\\\\n\\\\n\\\\norigin OwnershipClass\\\\n\\\\n\\\\n\\\\nownershipTypeInfo OwnershipTypeKeyClass\\\\n\\\\n\\\\n\\\\npostInfo PostKeyClass\\\\n\\\\n\\\\n\\\\nqueryKey QueryPropertiesClass\\\\n\\\\n\\\\n\\\\nquerySubjects RoleKeyClass\\\\n\\\\n\\\\n\\\\nroleMembership RolePropertiesClass\\\\n\\\\n\\\\n\\\\nschemaFieldKey SchemaMetadataClass\\\\n\\\\n\\\\n\\\\nsiblings SourceCodeClass\\\\n\\\\n\\\\n\\\\nstatus SubTypesClass\\\\n\\\\n\\\\n\\\\ntagKey TagPropertiesClass\\\\n\\\\n\\\\n\\\\ntelemetryClientId TelemetryKeyClass\\\\n\\\\n\\\\n\\\\ntestInfo TestKeyClass\\\\n\\\\n\\\\n\\\\ntestResults UpstreamLineageClass\\\\n\\\\n\\\\n\\\\nversionInfo ViewPropertiesClass\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionActionClass(type)\\\\nBases\\\\ntype (Union[str, AssertionActionTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nRAISEINCIDENT = \'RAISEINCIDENT\'\\\\n\\\\n\\\\n\\\\nRESOLVEINCIDENT = \'RESOLVEINCIDENT\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionActionsClass(onSuccess=None, onFailure=None)\\\\nBases\\\\n\\\\nonSuccess (Optional[List[AssertionActionClass]])\\\\nonFailure (Optional[List[AssertionActionClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty onFailure List[AssertionActionClass]\\\\nActions to be executed on successful assertion run.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionInfoClass(type, customProperties=None, externalUrl=None, datasetAssertion=None, freshnessAssertion=None, volumeAssertion=None, sqlAssertion=None, schemaAssertion=None, source=None, description=None)\\\\nBases\\\\n\\\\ntype (Union[str, AssertionTypeClass])\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\ndatasetAssertion (Optional[DatasetAssertionInfoClass])\\\\nfreshnessAssertion (Optional[FreshnessAssertionInfoClass])\\\\nvolumeAssertion (Optional[VolumeAssertionInfoClass])\\\\nsqlAssertion (Optional[SqlAssertionInfoClass])\\\\nschemaAssertion (Optional[SchemaAssertionInfoClass])\\\\nsource (Optional[AssertionSourceClass])\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty customProperties None | DatasetAssertionInfoClass\\\\nA Dataset Assertion definition. This field is populated when the type is DATASET.\\\\n\\\\n\\\\n\\\\nproperty description None | str\\\\nURL where the reference exist\\\\n\\\\n\\\\n\\\\nproperty freshnessAssertion None | SchemaAssertionInfoClass\\\\nAn schema Assertion definition. This field is populated when the type is DATASCHEMA\\\\n\\\\n\\\\n\\\\nproperty source None | SqlAssertionInfoClass\\\\nA SQL Assertion definition. This field is populated when the type is SQL.\\\\n\\\\n\\\\n\\\\nproperty type None | VolumeAssertionInfoClass\\\\nAn Volume Assertion definition. This field is populated when the type is VOLUME.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionKeyClass(assertionId)\\\\nBases\\\\nassertionId (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty assertionId DictWrapper\\\\nThe result of running an assertion\\\\n\\\\nParameters None | float\\\\nObserved aggregate value for evaluated batch\\\\n\\\\n\\\\n\\\\nproperty error None | str\\\\nExternal URL where full results are available. Only present when assertion source is not native.\\\\n\\\\n\\\\n\\\\nproperty missingCount None | Dict[str, str]\\\\nOther results of evaluation\\\\n\\\\n\\\\n\\\\nproperty rowCount str | AssertionResultTypeClass\\\\nThe final result, e.g. either SUCCESS, FAILURE, or ERROR.\\\\n\\\\n\\\\n\\\\nproperty unexpectedCount DictWrapper\\\\nAn error encountered when evaluating an AssertionResult\\\\n\\\\nParameters None | Dict[str, str]\\\\nAdditional metadata depending on the type of error\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nINSUFFICIENTDATA = \'INSUFFICIENTDATA\'\\\\n\\\\n\\\\n\\\\nINVALIDPARAMETERS = \'INVALIDPARAMETERS\'\\\\n\\\\n\\\\n\\\\nINVALIDSOURCETYPE = \'INVALIDSOURCETYPE\'\\\\n\\\\n\\\\n\\\\nSOURCECONNECTIONERROR = \'SOURCECONNECTIONERROR\'\\\\n\\\\n\\\\n\\\\nSOURCEQUERYFAILED = \'SOURCEQUERYFAILED\'\\\\n\\\\n\\\\n\\\\nUNKNOWNERROR = \'UNKNOWNERROR\'\\\\n\\\\n\\\\n\\\\nUNSUPPORTEDPLATFORM = \'UNSUPPORTEDPLATFORM\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionResultTypeClass\\\\nBases Aspect\\\\nAn event representing the current status of evaluating an assertion on a batch.\\\\nAssertionRunEvent should be used for reporting the status of a run as an assertion evaluation progresses.\\\\n\\\\nParameters ClassVar[str] = \'timeseries\'\\\\n\\\\n\\\\n\\\\nproperty asserteeUrn str\\\\n\\\\n\\\\n\\\\nproperty batchSpec None | TimeWindowSizeClass\\\\nGranularity of the event if applicable\\\\n\\\\n\\\\n\\\\nproperty messageId PartitionSpecClass | None\\\\nThe optional partition specification.\\\\n\\\\n\\\\n\\\\nproperty result str\\\\nNative (platform-specific) identifier for this run\\\\n\\\\n\\\\n\\\\nproperty runtimeContext str | AssertionRunStatusClass\\\\nThe status of the assertion run as per this timeseries event.\\\\n\\\\n\\\\n\\\\nproperty timestampMillis object\\\\nThe lifecycle status of an assertion run.\\\\n\\\\n\\\\nCOMPLETE = \'COMPLETE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionSourceClass(type)\\\\nBases\\\\ntype (Union[str, AssertionSourceTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nEXTERNAL = \'EXTERNAL\'\\\\n\\\\n\\\\n\\\\nINFERRED = \'INFERRED\'\\\\n\\\\n\\\\n\\\\nNATIVE = \'NATIVE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionStdAggregationClass\\\\nBases object\\\\nA boolean operator that is applied on the input to an assertion, after an aggregation function has been applied.\\\\n\\\\n\\\\nBETWEEN = \'BETWEEN\'\\\\n\\\\n\\\\n\\\\nCONTAIN = \'CONTAIN\'\\\\n\\\\n\\\\n\\\\nENDWITH = \'ENDWITH\'\\\\n\\\\n\\\\n\\\\nEQUALTO = \'EQUALTO\'\\\\n\\\\n\\\\n\\\\nGREATERTHAN = \'GREATERTHAN\'\\\\n\\\\n\\\\n\\\\nGREATERTHANOREQUALTO = \'GREATERTHANOREQUALTO\'\\\\n\\\\n\\\\n\\\\nIN = \'IN\'\\\\n\\\\n\\\\n\\\\nLESSTHAN = \'LESSTHAN\'\\\\n\\\\n\\\\n\\\\nLESSTHANOREQUALTO = \'LESSTHANOREQUALTO\'\\\\n\\\\n\\\\n\\\\nNOTIN = \'NOTIN\'\\\\n\\\\n\\\\n\\\\nNOTNULL = \'NOTNULL\'\\\\n\\\\n\\\\n\\\\nREGEXMATCH = \'REGEXMATCH\'\\\\n\\\\n\\\\n\\\\nSTARTWITH = \'STARTWITH\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionStdParameterClass(value, type)\\\\nBases\\\\n\\\\nvalue (str)\\\\ntype (Union[str, AssertionStdParameterTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty type str\\\\nThe parameter value\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionStdParameterTypeClass\\\\nBases DictWrapper\\\\nParameters for AssertionStdOperators.\\\\n\\\\nParameters None | AssertionStdParameterClass\\\\nThe maxValue parameter of an assertion\\\\n\\\\n\\\\n\\\\nproperty minValue None | AssertionStdParameterClass\\\\nThe value parameter of an assertion\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AssertionTypeClass\\\\nBases object\\\\nAn enum to represent a type of change in an assertion value, metric, or measurement.\\\\n\\\\n\\\\nABSOLUTE = \'ABSOLUTE\'\\\\n\\\\n\\\\n\\\\nPERCENTAGE = \'PERCENTAGE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.AuditStampClass(time, actor, impersonator=None, message=None)\\\\nBases\\\\n\\\\ntime (int)\\\\nactor (str)\\\\nimpersonator (Optional[str])\\\\nmessage (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty actor None | str\\\\nThe entity (e.g. a service URN) which performs the change on behalf of the Actor and must be authorized to act as the Actor.\\\\n\\\\n\\\\n\\\\nproperty message\\\\nAdditional context around how DataHub was informed of the particular change. For example\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty time object\\\\nThe various types of support azkaban jobs\\\\n\\\\n\\\\nCOMMAND = \'COMMAND\'\\\\n\\\\n\\\\n\\\\nGLUE = \'GLUE\'\\\\n\\\\n\\\\n\\\\nHADOOPJAVA = \'HADOOPJAVA\'\\\\n\\\\n\\\\n\\\\nHADOOPSHELL = \'HADOOPSHELL\'\\\\n\\\\n\\\\n\\\\nHIVE = \'HIVE\'\\\\n\\\\n\\\\n\\\\nPIG = \'PIG\'\\\\n\\\\n\\\\n\\\\nSQL = \'SQL\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.BaseDataClass(dataset, motivation=None, preProcessing=None)\\\\nBases\\\\n\\\\ndataset (str)\\\\nmotivation (Optional[str])\\\\npreProcessing (Optional[List[str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty dataset None | str\\\\nWhy was this dataset chosen?\\\\n\\\\n\\\\n\\\\nproperty preProcessing DictWrapper\\\\nA batch on which certain operations, e.g. data quality evaluation, is done.\\\\n\\\\nParameters Dict[str, str]\\\\nCustom property bag.\\\\n\\\\n\\\\n\\\\nproperty limit None | str\\\\nThe native identifier as specified by the system operating on the batch.\\\\n\\\\n\\\\n\\\\nproperty query DictWrapper\\\\nSchema text of binary JSON schema.\\\\n\\\\nParameters str\\\\nThe native schema text for binary JSON file format.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.BooleanTypeClass\\\\nBases DictWrapper\\\\nRepresents a single level in an entity\\\\u2019s browsePathV2\\\\n\\\\nParameters str\\\\nThe ID of the browse path entry. This is what gets stored in the index.\\\\nIf there\\\\u2019s an urn associated with this entry, id and urn will be the same\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nShared aspect containing Browse Paths to be indexed for an entity.\\\\n\\\\nParameters List[str]\\\\nA list of valid browse paths for the entity.\\\\nBrowse paths are expected to be forward slash-separated strings. For example Aspect\\\\nShared aspect containing a Browse Path to be indexed for an entity.\\\\n\\\\nParameters List[BrowsePathEntryClass]\\\\nA valid browse path for the entity. This field is provided by DataHub by default.\\\\nThis aspect is a newer version of browsePaths where we can encode more information in the path.\\\\nThis path is also based on containers for a given entity if it has containers.\\\\nThis is stored in elasticsearch as unit-separator delimited strings and only includes platform specific folders or containers.\\\\nThese paths should not include high level info captured elsewhere ie. Platform and Environment.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.BytesTypeClass\\\\nBases object\\\\n\\\\n\\\\nDAY = \'DAY\'\\\\n\\\\n\\\\n\\\\nHOUR = \'HOUR\'\\\\n\\\\n\\\\n\\\\nMINUTE = \'MINUTE\'\\\\n\\\\n\\\\n\\\\nMONTH = \'MONTH\'\\\\n\\\\n\\\\n\\\\nQUARTER = \'QUARTER\'\\\\n\\\\n\\\\n\\\\nSECOND = \'SECOND\'\\\\n\\\\n\\\\n\\\\nWEEK = \'WEEK\'\\\\n\\\\n\\\\n\\\\nYEAR = \'YEAR\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CaveatDetailsClass(needsFurtherTesting=None, caveatDescription=None, groupsNotRepresented=None)\\\\nBases\\\\n\\\\nneedsFurtherTesting (Optional[bool])\\\\ncaveatDescription (Optional[str])\\\\ngroupsNotRepresented (Optional[List[str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty caveatDescription Given gender classes are binary (male/not male), which we include as male/female. Further work needed to evaluate across a spectrum of genders.\\\\n\\\\n\\\\n\\\\nproperty groupsNotRepresented None | bool\\\\nDid the results suggest any further testing?\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CaveatsAndRecommendationsClass(caveats=None, recommendations=None, idealDatasetCharacteristics=None)\\\\nBases\\\\n\\\\ncaveats (Optional[CaveatDetailsClass])\\\\nrecommendations (Optional[str])\\\\nidealDatasetCharacteristics (Optional[List[str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty caveats None | List[str]\\\\nIdeal characteristics of an evaluation dataset for this MLModel\\\\n\\\\n\\\\n\\\\nproperty recommendations DictWrapper\\\\nData captured on a resource/association/sub-resource level giving insight into when that resource/association/sub-resource moved into various lifecycle stages, and who acted to move it into those lifecycle stages. The recommended best practice is to include this record in your record schema, and annotate its fields as @readOnly in your resource. See linkedin/rest.li\\\\n\\\\nParameters AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty deleted AuditStampClass\\\\nAn AuditStamp corresponding to the last modification of this resource/association/sub-resource. If no modification has happened since creation, lastModified should be the same as created. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ChangeTypeClass\\\\nBases DictWrapper\\\\nChart cell in a notebook, which will present content in chart format\\\\n\\\\nParameters str\\\\nUnique id for the cell. This id should be globally unique for a Notebook tool even when there are multiple deployments of it. As an example, Notebook URL could be used here for QueryBook such as \\\\u2018querybook.com/notebook/773/?cellId=1234\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty cellTitle ChangeAuditStampsClass\\\\nCaptures information about who created/last modified/deleted this Notebook cell and when\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ChartInfoClass(title, description, lastModified, customProperties=None, externalUrl=None, chartUrl=None, inputs=None, inputEdges=None, type=None, access=None, lastRefreshed=None)\\\\nBases\\\\n\\\\ntitle (str)\\\\ndescription (str)\\\\nlastModified (ChangeAuditStampsClass)\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\nchartUrl (Optional[str])\\\\ninputs (Optional[List[str]])\\\\ninputEdges (Optional[List[EdgeClass]])\\\\ntype (Union[None, str, ChartTypeClass])\\\\naccess (Union[None, str, AccessLevelClass])\\\\nlastRefreshed (Optional[int])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty access None | str\\\\nURL for the chart. This could be used as an external link on DataHub to allow users access/view the chart\\\\n\\\\n\\\\n\\\\nproperty customProperties str\\\\nDetailed description about the chart\\\\n\\\\n\\\\n\\\\nproperty externalUrl None | List[EdgeClass]\\\\nData sources for the chart\\\\n\\\\n\\\\n\\\\nproperty inputs ChangeAuditStampsClass\\\\nCaptures information about who created/last modified/deleted this chart and when\\\\n\\\\n\\\\n\\\\nproperty lastRefreshed str\\\\nTitle of the chart\\\\n\\\\n\\\\n\\\\nproperty type Aspect\\\\nKey for a Chart\\\\n\\\\nParameters str\\\\nUnique id for the chart. This id should be globally unique for a dashboarding tool even when there are multiple deployments of it. As an example, chart URL could be used here for Looker such as \\\\u2018looker.linkedin.com/looks/1234\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty dashboardTool Aspect\\\\nInformation for chart query which is used for getting data of the chart\\\\n\\\\nParameters str\\\\nRaw query to build a chart from input datasets\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nLOOKML = \'LOOKML\'\\\\n\\\\n\\\\n\\\\nSQL = \'SQL\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ChartSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[ChartKeyClass, ChartInfoClass, ChartQueryClass, EditableChartPropertiesClass, OwnershipClass, StatusClass, GlobalTagsClass, BrowsePathsClass, GlossaryTermsClass, InstitutionalMemoryClass, DataPlatformInstanceClass, BrowsePathsV2Class]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ChartTypeClass\\\\nBases Aspect\\\\nExperimental (Subject to breaking change) \\\\u2013 Stats corresponding to chart\\\\u2019s usage.\\\\nIf this aspect represents the latest snapshot of the statistics about a Chart, the eventGranularity field should be null.\\\\nIf this aspect represents a bucketed window of usage statistics (e.g. over a day), then the eventGranularity field should be set accordingly.\\\\n\\\\nParameters ClassVar[str] = \'timeseries\'\\\\n\\\\n\\\\n\\\\nproperty eventGranularity None | str\\\\nThe optional messageId, if provided serves as a custom user-defined unique identifier for an aspect value.\\\\n\\\\n\\\\n\\\\nproperty partitionSpec int\\\\nThe event timestamp field as epoch at UTC in milli seconds.\\\\n\\\\n\\\\n\\\\nproperty uniqueUserCount None | List[ChartUserUsageCountsClass]\\\\nUsers within this bucket, with frequency counts\\\\n\\\\n\\\\n\\\\nproperty viewsCount DictWrapper\\\\nRecords a single user\\\\u2019s usage counts for a given resource\\\\n\\\\nParameters str\\\\nThe unique id of the user.\\\\n\\\\n\\\\n\\\\nproperty viewsCount object\\\\nThe matching condition in a filter criterion\\\\n\\\\n\\\\nCONTAIN = \'CONTAIN\'\\\\n\\\\n\\\\n\\\\nENDWITH = \'ENDWITH\'\\\\n\\\\n\\\\n\\\\nEQUAL = \'EQUAL\'\\\\n\\\\n\\\\n\\\\nEXISTS = \'EXISTS\'\\\\n\\\\n\\\\n\\\\nGREATERTHAN = \'GREATERTHAN\'\\\\n\\\\n\\\\n\\\\nGREATERTHANOREQUALTO = \'GREATERTHANOREQUALTO\'\\\\n\\\\n\\\\n\\\\nIN = \'IN\'\\\\n\\\\n\\\\n\\\\nISNULL = \'ISNULL\'\\\\n\\\\n\\\\n\\\\nLESSTHAN = \'LESSTHAN\'\\\\n\\\\n\\\\n\\\\nLESSTHANOREQUALTO = \'LESSTHANOREQUALTO\'\\\\n\\\\n\\\\n\\\\nSTARTWITH = \'STARTWITH\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ConjunctiveCriterionClass(and)\\\\nBases\\\\nand (List[CriterionClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty and Aspect\\\\nLink from an asset to its parent container\\\\n\\\\nParameters str\\\\nThe parent container of an asset\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ContainerKeyClass(guid=None)\\\\nBases\\\\nguid (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty guid Aspect\\\\nInformation about a Asset Container as received from a 3rd party source system\\\\n\\\\nParameters None | TimeStampClass\\\\nA timestamp documenting when the asset was created in the source Data Platform (not on DataHub)\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDescription of the Asset Container as it exists inside a source system\\\\n\\\\n\\\\n\\\\nproperty externalUrl None | TimeStampClass\\\\nA timestamp documenting when the asset was last modified in the source Data Platform (not on DataHub)\\\\n\\\\n\\\\n\\\\nproperty name None | str\\\\nFully-qualified name of the Container\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpGroupEditableInfoClass(description=None, pictureLink=None, slack=None, email=None)\\\\nBases\\\\n\\\\ndescription (Optional[str])\\\\npictureLink (Optional[str])\\\\nslack (Optional[str])\\\\nemail (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty description None | str\\\\nEmail address to contact the group\\\\n\\\\n\\\\n\\\\nproperty pictureLink None | str\\\\nSlack channel for the group\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpGroupInfoClass(admins, members, groups, displayName=None, email=None, description=None, slack=None, created=None)\\\\nBases\\\\n\\\\nadmins (List[str])\\\\nmembers (List[str])\\\\ngroups (List[str])\\\\ndisplayName (Optional[str])\\\\nemail (Optional[str])\\\\ndescription (Optional[str])\\\\nslack (Optional[str])\\\\ncreated (Optional[AuditStampClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty admins None | AuditStampClass\\\\nCreated Audit stamp\\\\n\\\\n\\\\n\\\\nproperty description None | str\\\\nThe name of the group.\\\\n\\\\n\\\\n\\\\nproperty email List[str]\\\\nList of groups in this group.\\\\nDeprecated! This field is unused.\\\\n\\\\n\\\\n\\\\nproperty members None | str\\\\nSlack channel for the group\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpGroupKeyClass(name)\\\\nBases\\\\nname (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty name DictWrapper\\\\nA metadata snapshot for a specific CorpGroup entity.\\\\n\\\\nParameters List[CorpGroupKeyClass | CorpGroupInfoClass | GlobalTagsClass | StatusClass]\\\\nThe list of metadata aspects associated with the LdapUser. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn DictWrapper\\\\nSettings for a user around the appearance of their DataHub UI\\\\n\\\\nParameters None | bool\\\\nFlag whether the user should see a homepage with only datasets, charts and dashboards. Intended for users\\\\nwho have less operational use cases for the datahub tool.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpUserCredentialsClass(salt, hashedPassword, passwordResetToken=None, passwordResetTokenExpirationTimeMillis=None)\\\\nBases\\\\n\\\\nsalt (str)\\\\nhashedPassword (str)\\\\npasswordResetToken (Optional[str])\\\\npasswordResetTokenExpirationTimeMillis (Optional[int])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty hashedPassword None | str\\\\nOptional token needed to reset a user\\\\u2019s password. Can only be set by the admin.\\\\n\\\\n\\\\n\\\\nproperty passwordResetTokenExpirationTimeMillis str\\\\nSalt used to hash password\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpUserEditableInfoClass(aboutMe=None, teams=None, skills=None, pictureLink=None, displayName=None, title=None, slack=None, phone=None, email=None)\\\\nBases\\\\n\\\\naboutMe (Optional[str])\\\\nteams (Optional[List[str]])\\\\nskills (Optional[List[str]])\\\\npictureLink (Optional[str])\\\\ndisplayName (Optional[str])\\\\ntitle (Optional[str])\\\\nslack (Optional[str])\\\\nphone (Optional[str])\\\\nemail (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aboutMe None | str\\\\nDataHub-native display name\\\\n\\\\n\\\\n\\\\nproperty email None | str\\\\nPhone number to contact the user\\\\n\\\\n\\\\n\\\\nproperty pictureLink List[str]\\\\nSkills that the user possesses e.g. Machine Learning\\\\n\\\\n\\\\n\\\\nproperty slack List[str]\\\\nTeams that the user belongs to e.g. Metadata\\\\n\\\\n\\\\n\\\\nproperty title Aspect\\\\nLinkedin corp user information\\\\n\\\\nParameters bool\\\\n//iwww.corp.linkedin.com/wiki/cf/display/GTSD/Accessing+Active+Directory+via+LDAP+tools\\\\n\\\\nType\\\\nhttps\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty countryCode Dict[str, str]\\\\nCustom property bag.\\\\n\\\\n\\\\n\\\\nproperty departmentId None | str\\\\ndepartment name this user belong to\\\\n\\\\n\\\\n\\\\nproperty displayName None | str\\\\nemail address of this user\\\\n\\\\n\\\\n\\\\nproperty firstName None | str\\\\nCommon name of this user, format is firstName + lastName (split by a whitespace)\\\\n\\\\n\\\\n\\\\nproperty lastName None | str\\\\ndirect manager of this user\\\\n\\\\n\\\\n\\\\nproperty title Aspect\\\\nKey for a CorpUser\\\\n\\\\nParameters str\\\\nThe name of the AD/LDAP user.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpUserSettingsClass(appearance, views=None)\\\\nBases\\\\n\\\\nappearance (CorpUserAppearanceSettingsClass)\\\\nviews (Optional[CorpUserViewsSettingsClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty appearance None | CorpUserViewsSettingsClass\\\\nUser preferences for the Views feature.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpUserSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[CorpUserKeyClass, CorpUserInfoClass, CorpUserEditableInfoClass, CorpUserStatusClass, GroupMembershipClass, GlobalTagsClass, StatusClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpUserStatusClass(status, lastModified)\\\\nBases\\\\n\\\\nstatus (str)\\\\nlastModified (AuditStampClass)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty lastModified str\\\\nStatus of the user, e.g. PROVISIONED / ACTIVE / SUSPENDED\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CorpUserViewsSettingsClass(defaultView=None)\\\\nBases\\\\ndefaultView (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty defaultView Aspect\\\\n\\\\nParameters CostCostClass\\\\n\\\\n\\\\n\\\\nproperty costType DictWrapper\\\\n\\\\nParameters None | str\\\\n\\\\n\\\\n\\\\nproperty costId str | CostCostDiscriminatorClass\\\\nContains the name of the field that has its value set.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CostCostDiscriminatorClass\\\\nBases object\\\\nType of Cost Code\\\\n\\\\n\\\\nORGCOSTTYPE = \'ORGCOSTTYPE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.CriterionClass(field, value, values=None, condition=None, negated=None)\\\\nBases\\\\n\\\\nfield (str)\\\\nvalue (str)\\\\nvalues (Optional[List[str]])\\\\ncondition (Union[str, ConditionClass, None])\\\\nnegated (Optional[bool])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty condition str\\\\nThe name of the field that the criterion refers to\\\\n\\\\n\\\\n\\\\nproperty negated str\\\\nThe value of the intended field\\\\n\\\\n\\\\n\\\\nproperty values Aspect\\\\nInformation about a dashboard\\\\n\\\\nParameters None | str | AccessLevelClass\\\\nAccess level for the dashboard\\\\n\\\\n\\\\n\\\\nproperty chartEdges List[str]\\\\nCharts in a dashboard\\\\nDeprecated! Use chartEdges instead.\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nURL for the dashboard. This could be used as an external link on DataHub to allow users access/view the dashboard\\\\n\\\\n\\\\n\\\\nproperty datasetEdges List[str]\\\\nDatasets consumed by a dashboard\\\\nDeprecated! Use datasetEdges instead.\\\\n\\\\n\\\\n\\\\nproperty description None | str\\\\nURL where the reference exist\\\\n\\\\n\\\\n\\\\nproperty lastModified None | int\\\\nThe time when this dashboard last refreshed\\\\n\\\\n\\\\n\\\\nproperty title Aspect\\\\nKey for a Dashboard\\\\n\\\\nParameters str\\\\nUnique id for the dashboard. This id should be globally unique for a dashboarding tool even when there are multiple deployments of it. As an example, dashboard URL could be used here for Looker such as \\\\u2018looker.linkedin.com/dashboards/1234\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty dashboardTool DictWrapper\\\\nA metadata snapshot for a specific Dashboard entity.\\\\n\\\\nParameters List[DashboardKeyClass | DashboardInfoClass | EditableDashboardPropertiesClass | OwnershipClass | StatusClass | GlobalTagsClass | BrowsePathsClass | GlossaryTermsClass | InstitutionalMemoryClass | DataPlatformInstanceClass | BrowsePathsV2Class]\\\\nThe list of metadata aspects associated with the dashboard. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nExperimental (Subject to breaking change) \\\\u2013 Stats corresponding to dashboard\\\\u2019s usage.\\\\nIf this aspect represents the latest snapshot of the statistics about a Dashboard, the eventGranularity field should be null.\\\\nIf this aspect represents a bucketed window of usage statistics (e.g. over a day), then the eventGranularity field should be set accordingly.\\\\n\\\\nParameters ClassVar[str] = \'timeseries\'\\\\n\\\\n\\\\n\\\\nproperty eventGranularity None | int\\\\nThe total number of dashboard executions (refreshes / syncs)\\\\n\\\\n\\\\n\\\\nproperty favoritesCount None | int\\\\nLast viewed at\\\\nThis should not be set in cases where statistics are windowed.\\\\n\\\\n\\\\n\\\\nproperty messageId PartitionSpecClass | None\\\\nThe optional partition specification.\\\\n\\\\n\\\\n\\\\nproperty timestampMillis None | int\\\\nUnique user count\\\\n\\\\n\\\\n\\\\nproperty userCounts None | int\\\\nThe total number of times dashboard has been viewed\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DashboardUserUsageCountsClass(user, viewsCount=None, executionsCount=None, usageCount=None, userEmail=None)\\\\nBases\\\\n\\\\nuser (str)\\\\nviewsCount (Optional[int])\\\\nexecutionsCount (Optional[int])\\\\nusageCount (Optional[int])\\\\nuserEmail (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty executionsCount None | int\\\\nNormalized numeric metric representing user\\\\u2019s dashboard usage \\\\u2013 the number of times the user executed or viewed the dashboard.\\\\n\\\\n\\\\n\\\\nproperty user None | str\\\\nIf useremail is set, we attempt to resolve the user\\\\u2019s urn upon ingest\\\\n\\\\n\\\\n\\\\nproperty viewsCount Aspect\\\\nKey for a Data Contract\\\\n\\\\nParameters str\\\\nUnique id for the contract\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataContractPropertiesClass(entity, schema=None, freshness=None, dataQuality=None, rawContract=None)\\\\nBases\\\\n\\\\nentity (str)\\\\nschema (Optional[List[SchemaContractClass]])\\\\nfreshness (Optional[List[FreshnessContractClass]])\\\\ndataQuality (Optional[List[DataQualityContractClass]])\\\\nrawContract (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty dataQuality str\\\\nThe entity that this contract is associated with. Currently, we only support Dataset contracts, but\\\\nin the future we may also support Data Product level contracts.\\\\n\\\\n\\\\n\\\\nproperty freshness None | str\\\\nYAML-formatted contract definition\\\\n\\\\n\\\\n\\\\nproperty schema object\\\\n\\\\n\\\\nACTIVE = \'ACTIVE\'\\\\n\\\\n\\\\n\\\\nPENDING = \'PENDING\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataContractStatusClass(state, customProperties=None)\\\\nBases\\\\n\\\\nstate (Union[str, DataContractStateClass])\\\\ncustomProperties (Optional[Dict[str, str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty customProperties str | DataContractStateClass\\\\nThe latest state of the data contract\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataFlowInfoClass(name, customProperties=None, externalUrl=None, description=None, project=None, created=None, lastModified=None)\\\\nBases\\\\n\\\\nname (str)\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\ndescription (Optional[str])\\\\nproject (Optional[str])\\\\ncreated (Optional[TimeStampClass])\\\\nlastModified (Optional[TimeStampClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty created Dict[str, str]\\\\nCustom property bag.\\\\n\\\\n\\\\n\\\\nproperty description None | str\\\\nURL where the reference exist\\\\n\\\\n\\\\n\\\\nproperty lastModified str\\\\nFlow name\\\\n\\\\n\\\\n\\\\nproperty project Aspect\\\\nKey for a Data Flow\\\\n\\\\nParameters str\\\\nCluster where the flow is executed\\\\n\\\\n\\\\n\\\\nproperty flowId str\\\\nWorkflow manager like azkaban, airflow which orchestrates the flow\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataFlowSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[DataFlowKeyClass, DataFlowInfoClass, EditableDataFlowPropertiesClass, OwnershipClass, StatusClass, GlobalTagsClass, BrowsePathsClass, GlossaryTermsClass, InstitutionalMemoryClass, DataPlatformInstanceClass, BrowsePathsV2Class]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataHubAccessTokenInfoClass(name, actorUrn, ownerUrn, createdAt, expiresAt=None, description=None)\\\\nBases\\\\n\\\\nname (str)\\\\nactorUrn (str)\\\\nownerUrn (str)\\\\ncreatedAt (int)\\\\nexpiresAt (Optional[int])\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty actorUrn int\\\\nWhen the token was created.\\\\n\\\\n\\\\n\\\\nproperty description None | int\\\\nWhen the token expires.\\\\n\\\\n\\\\n\\\\nproperty name str\\\\nUrn of the actor which created this access token.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataHubAccessTokenKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id DictWrapper\\\\nInformation used to filter DataHub actors.\\\\n\\\\nParameters bool\\\\nWhether the filter should apply to all groups.\\\\n\\\\n\\\\n\\\\nproperty allUsers None | List[str]\\\\nA specific set of groups to apply the policy to (disjunctive)\\\\n\\\\n\\\\n\\\\nproperty resourceOwners None | List[str]\\\\nDefine type of ownership for the policy\\\\n\\\\n\\\\n\\\\nproperty roles None | List[str]\\\\nA specific set of users to apply the policy to (disjunctive)\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataHubIngestionSourceConfigClass(recipe, version=None, executorId=None, debugMode=None, extraArgs=None)\\\\nBases\\\\n\\\\nrecipe (str)\\\\nversion (Optional[str])\\\\nexecutorId (Optional[str])\\\\ndebugMode (Optional[bool])\\\\nextraArgs (Optional[Dict[str, str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty debugMode None | str\\\\nThe id of the executor to use to execute the ingestion run\\\\n\\\\n\\\\n\\\\nproperty extraArgs str\\\\nThe JSON recipe to use for ingestion\\\\n\\\\n\\\\n\\\\nproperty version Aspect\\\\nInfo about a DataHub ingestion source\\\\n\\\\nParameters DataHubIngestionSourceConfigClass\\\\nParameters associated with the Ingestion Source\\\\n\\\\n\\\\n\\\\nproperty name None | str\\\\nData Platform URN associated with the source\\\\n\\\\n\\\\n\\\\nproperty schedule str\\\\nThe type of the source itself, e.g. mysql, bigquery, bigquery-usage. Should match the recipe.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataHubIngestionSourceKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id DictWrapper\\\\nThe schedule associated with an ingestion source.\\\\n\\\\nParameters str\\\\nA cron-formatted execution interval, as a cron string, e.g.     \\\\n\\\\n\\\\n\\\\nproperty timezone _Aspect\\\\nInformation about a DataHub (UI) access policy.\\\\n\\\\nParameters DataHubActorFilterClass\\\\nThe actors that the policy applies to.\\\\n\\\\n\\\\n\\\\nproperty description str\\\\nDisplay name of the Policy\\\\n\\\\n\\\\n\\\\nproperty editable None | int\\\\nTimestamp when the policy was last updated\\\\n\\\\n\\\\n\\\\nproperty privileges None | DataHubResourceFilterClass\\\\nThe resource that the policy applies to. Not required for some \\\\u2018Platform\\\\u2019 privileges.\\\\n\\\\n\\\\n\\\\nproperty state str\\\\nThe type of policy\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubPolicyKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id DictWrapper\\\\nA metadata snapshot for DataHub Access Policy data.\\\\n\\\\nParameters List[DataHubPolicyKeyClass | DataHubPolicyInfoClass]\\\\nThe list of metadata aspects associated with the DataHub access policy.\\\\n\\\\n\\\\n\\\\nproperty urn DictWrapper\\\\nInformation used to filter DataHub resource.\\\\n\\\\nParameters bool\\\\nWhether the policy should be applied to all assets matching the filter.\\\\n\\\\n\\\\n\\\\nproperty filter None | List[str]\\\\nA specific set of resources to apply the policy to, e.g. asset urns\\\\n\\\\n\\\\n\\\\nproperty type _Aspect\\\\n\\\\nParameters RetentionClass\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubRetentionKeyClass(entityName, aspectName)\\\\nBases\\\\n\\\\nentityName (str)\\\\naspectName (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspectName str\\\\nEntity name to apply retention to.  (or empty) for applying defaults.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubRetentionSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[DataHubRetentionKeyClass, DataHubRetentionConfigClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubRoleInfoClass(name, description, editable=None)\\\\nBases\\\\n\\\\nname (str)\\\\ndescription (str)\\\\neditable (Optional[bool])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty description bool\\\\nWhether the role should be editable via the UI\\\\n\\\\n\\\\n\\\\nproperty name _Aspect\\\\nKey for a DataHub Role\\\\n\\\\nParameters str\\\\nA unique id for the DataHub role record. Generated on the server side at role creation time.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubSecretKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id _Aspect\\\\nThe value of a DataHub Secret\\\\n\\\\nParameters None | AuditStampClass\\\\nCreated Audit stamp\\\\n\\\\n\\\\n\\\\nproperty description str\\\\nThe display name for the secret\\\\n\\\\n\\\\n\\\\nproperty value _Aspect\\\\nKey for a DataHub Step State\\\\n\\\\nParameters str\\\\nA unique id for the state\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubStepStatePropertiesClass(lastModified, properties=None)\\\\nBases\\\\n\\\\nlastModified (AuditStampClass)\\\\nproperties (Optional[Dict[str, str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty lastModified Dict[str, str]\\\\nDescription of the secret\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubUpgradeKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id _Aspect\\\\nInformation collected when kicking off a DataHubUpgrade\\\\n\\\\nParameters int\\\\nTimestamp when we started this DataHubUpgrade\\\\n\\\\n\\\\n\\\\nproperty version _Aspect\\\\nInformation collected when a DataHubUpgrade successfully finishes\\\\n\\\\nParameters None | Dict[str, str]\\\\nResult map to place helpful information about this upgrade job\\\\n\\\\n\\\\n\\\\nproperty timestampMs DictWrapper\\\\nA View definition.\\\\n\\\\nParameters List[str]\\\\nThe Entity Types in the scope of the View.\\\\n\\\\n\\\\n\\\\nproperty filter _Aspect\\\\nInformation about a DataHub View. \\\\u2013 TODO\\\\n\\\\nname (str)\\\\ntype (Union[str, DataHubViewTypeClass])\\\\ndefinition (DataHubViewDefinitionClass)\\\\ncreated (AuditStampClass)\\\\nlastModified (AuditStampClass)\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty created DataHubViewDefinitionClass\\\\nThe view itself\\\\n\\\\n\\\\n\\\\nproperty description AuditStampClass\\\\nAudit stamp capturing the time and actor who last modified the View.\\\\n\\\\n\\\\n\\\\nproperty name str | DataHubViewTypeClass\\\\nThe type of View\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataHubViewKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id object\\\\n\\\\n\\\\nGLOBAL = \'GLOBAL\'\\\\n\\\\n\\\\n\\\\nPERSONAL = \'PERSONAL\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schema_classes.DataJobInfoClass(name, type, customProperties=None, externalUrl=None, description=None, flowUrn=None, created=None, lastModified=None, status=None)\\\\nBases\\\\n\\\\nname (str)\\\\ntype (Union[str, AzkabanJobTypeClass])\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\ndescription (Optional[str])\\\\nflowUrn (Optional[str])\\\\ncreated (Optional[TimeStampClass])\\\\nlastModified (Optional[TimeStampClass])\\\\nstatus (Union[None, str, JobStatusClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty created Dict[str, str]\\\\nCustom property bag.\\\\n\\\\n\\\\n\\\\nproperty description None | str\\\\nURL where the reference exist\\\\n\\\\n\\\\n\\\\nproperty flowUrn None | TimeStampClass\\\\nA timestamp documenting when the asset was last modified in the source Data Platform (not on DataHub)\\\\n\\\\n\\\\n\\\\nproperty name None | str | JobStatusClass\\\\nStatus of the job - Deprecated for Data Process Instance model.\\\\n\\\\n\\\\n\\\\nproperty type AzkabanJobType is deprecated. Please use strings instead.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataJobInputOutputClass(inputDatasets, outputDatasets, inputDatasetEdges=None, outputDatasetEdges=None, inputDatajobs=None, inputDatajobEdges=None, inputDatasetFields=None, outputDatasetFields=None, fineGrainedLineages=None)\\\\nBases\\\\n\\\\ninputDatasets (List[str])\\\\noutputDatasets (List[str])\\\\ninputDatasetEdges (Optional[List[EdgeClass]])\\\\noutputDatasetEdges (Optional[List[EdgeClass]])\\\\ninputDatajobs (Optional[List[str]])\\\\ninputDatajobEdges (Optional[List[EdgeClass]])\\\\ninputDatasetFields (Optional[List[str]])\\\\noutputDatasetFields (Optional[List[str]])\\\\nfineGrainedLineages (Optional[List[FineGrainedLineageClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty fineGrainedLineages None | List[EdgeClass]\\\\nInput datajobs that this data job depends on\\\\n\\\\n\\\\n\\\\nproperty inputDatajobs None | List[EdgeClass]\\\\nInput datasets consumed by the data job during processing\\\\n\\\\n\\\\n\\\\nproperty inputDatasetFields List[str]\\\\nInput datasets consumed by the data job during processing\\\\nDeprecated! Use inputDatasetEdges instead.\\\\n\\\\n\\\\n\\\\nproperty outputDatasetEdges None | List[str]\\\\nFields of the output datasets this job writes to\\\\n\\\\n\\\\n\\\\nproperty outputDatasets Aspect\\\\nKey for a Data Job\\\\n\\\\nParameters str\\\\nStandardized data processing flow urn representing the flow for the job\\\\n\\\\n\\\\n\\\\nproperty jobId DictWrapper\\\\nA metadata snapshot for a specific DataJob entity.\\\\n\\\\nParameters List[DataJobKeyClass | DataJobInfoClass | DataJobInputOutputClass | EditableDataJobPropertiesClass | OwnershipClass | StatusClass | GlobalTagsClass | BrowsePathsClass | GlossaryTermsClass | InstitutionalMemoryClass | DataPlatformInstanceClass | BrowsePathsV2Class]\\\\nThe list of metadata aspects associated with the data job. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nInformation about a data platform\\\\n\\\\nParameters str\\\\nThe delimiter in the dataset names on the data platform, e.g. \\\\u2018/\\\\u2019 for HDFS and \\\\u2018.\\\\u2019 for Oracle\\\\n\\\\n\\\\n\\\\nproperty displayName None | str\\\\nThe URL for a logo associated with the platform\\\\n\\\\n\\\\n\\\\nproperty name str | PlatformTypeClass\\\\nPlatform type this data platform describes\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataPlatformInstanceClass(platform, instance=None)\\\\nBases\\\\n\\\\nplatform (str)\\\\ninstance (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty instance str\\\\nData Platform\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataPlatformInstanceKeyClass(platform, instance)\\\\nBases\\\\n\\\\nplatform (str)\\\\ninstance (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty instance str\\\\nData platform urn associated with the instance\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataPlatformInstancePropertiesClass(customProperties=None, externalUrl=None, name=None, description=None)\\\\nBases\\\\n\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\nname (Optional[str])\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDocumentation of the Data Platform Instance\\\\n\\\\n\\\\n\\\\nproperty externalUrl None | str\\\\nDisplay name of the Data Platform Instance\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataPlatformKeyClass(platformName)\\\\nBases\\\\nplatformName (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty platformName DictWrapper\\\\nA metadata snapshot for a specific dataplatform entity.\\\\n\\\\nParameters List[DataPlatformKeyClass | DataPlatformInfoClass]\\\\nThe list of metadata aspects associated with the data platform. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nThe inputs and outputs of this data process\\\\n\\\\nParameters None | List[str]\\\\nthe inputs of the data process\\\\n\\\\n\\\\n\\\\nproperty outputs Aspect\\\\nInformation about the inputs datasets of a Data process\\\\n\\\\nParameters List[str]\\\\nInput datasets to be consumed\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataProcessInstanceKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id Aspect\\\\nInformation about the outputs of a Data process\\\\n\\\\nParameters List[str]\\\\nOutput datasets to be produced\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataProcessInstancePropertiesClass(name, created, customProperties=None, externalUrl=None, type=None)\\\\nBases\\\\n\\\\nname (str)\\\\ncreated (AuditStampClass)\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\ntype (Union[None, str, DataProcessTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty created Dict[str, str]\\\\nCustom property bag.\\\\n\\\\n\\\\n\\\\nproperty externalUrl str\\\\nProcess name\\\\n\\\\n\\\\n\\\\nproperty type Aspect\\\\nInformation about Data process relationships\\\\n\\\\nParameters None | str\\\\nThe parent DataProcessInstance where it belongs to.\\\\nIf it is a Airflow Task then it should belong to an Airflow Dag run as well\\\\nwhich will be another DataProcessInstance\\\\n\\\\n\\\\n\\\\nproperty parentTemplate List[str]\\\\nInput DataProcessInstance which triggered this dataprocess instance\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataProcessInstanceRunEventClass(timestampMillis, status, eventGranularity=None, partitionSpec=None, messageId=None, externalUrl=None, attempt=None, result=None)\\\\nBases\\\\n\\\\ntimestampMillis (int)\\\\nstatus (Union[str, DataProcessRunStatusClass])\\\\neventGranularity (Optional[TimeWindowSizeClass])\\\\npartitionSpec (Optional[PartitionSpecClass])\\\\nmessageId (Optional[str])\\\\nexternalUrl (Optional[str])\\\\nattempt (Optional[int])\\\\nresult (Optional[DataProcessInstanceRunResultClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nASPECTTYPE None | int\\\\nReturn the try number that this Instance Run is in\\\\n\\\\n\\\\n\\\\nproperty eventGranularity None | str\\\\nURL where the reference exist\\\\n\\\\n\\\\n\\\\nproperty messageId PartitionSpecClass | None\\\\nThe optional partition specification.\\\\n\\\\n\\\\n\\\\nproperty result str | DataProcessRunStatusClass\\\\n\\\\n\\\\n\\\\nproperty timestampMillis DictWrapper\\\\n\\\\nParameters str\\\\nIt identifies the system where the native result comes from like Airflow, Azkaban, etc..\\\\n\\\\n\\\\n\\\\nproperty type Aspect\\\\nKey for a Data Process\\\\n\\\\nParameters str\\\\nProcess name i.e. an ETL job name\\\\n\\\\n\\\\n\\\\nproperty orchestrator Migrate towards something that can be validated like DataPlatform urn\\\\n\\\\n\\\\n\\\\nproperty origin object\\\\n\\\\n\\\\nCOMPLETE = \'COMPLETE\'\\\\n\\\\n\\\\n\\\\nSTARTED = \'STARTED\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataProcessSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[DataProcessKeyClass, OwnershipClass, DataProcessInfoClass, StatusClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataProcessTypeClass\\\\nBases DictWrapper\\\\nRepresents an association of assets to a Data Product.\\\\n\\\\nParameters None | AuditStampClass\\\\nAudit stamp containing who created this relationship edge and when\\\\n\\\\n\\\\n\\\\nproperty destinationUrn None | AuditStampClass\\\\nAudit stamp containing who last modified this relationship edge and when\\\\n\\\\n\\\\n\\\\nproperty properties None | str\\\\nUrn of the source of this relationship edge.\\\\nIf not specified, assumed to be the entity that this aspect belongs to.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataProductKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id Aspect\\\\nThe main properties of a Data Product\\\\n\\\\nParameters None | List[DataProductAssociationClass]\\\\nA list of assets that are part of this Data Product\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDocumentation of the dataset\\\\n\\\\n\\\\n\\\\nproperty externalUrl None | str\\\\nDisplay name of the Data Product\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DataQualityContractClass(assertion)\\\\nBases\\\\nassertion (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty assertion Aspect\\\\nCheckpoint of a datahub ingestion run for a given job.\\\\n\\\\nParameters ClassVar[str] = \'timeseries\'\\\\n\\\\n\\\\n\\\\nproperty config None | TimeWindowSizeClass\\\\nGranularity of the event if applicable\\\\n\\\\n\\\\n\\\\nproperty messageId PartitionSpecClass | None\\\\nThe optional partition specification.\\\\n\\\\n\\\\n\\\\nproperty pipelineName str\\\\nThe id of the instance against which the ingestion pipeline ran.\\\\ne.g. str\\\\nThe run identifier of this job.\\\\n\\\\n\\\\n\\\\nproperty state int\\\\nThe event timestamp field as epoch at UTC in milli seconds.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatahubIngestionRunSummaryClass(timestampMillis, pipelineName, platformInstanceId, runId, runStatus, eventGranularity=None, partitionSpec=None, messageId=None, numWorkUnitsCommitted=None, numWorkUnitsCreated=None, numEvents=None, numEntities=None, numAspects=None, numSourceAPICalls=None, totalLatencySourceAPICalls=None, numSinkAPICalls=None, totalLatencySinkAPICalls=None, numWarnings=None, numErrors=None, numEntitiesSkipped=None, config=None, customsummary=None, softwareVersion=None, systemHostName=None, operatingSystemName=None, numProcessors=None, totalMemory=None, availableMemory=None)\\\\nBases\\\\n\\\\ntimestampMillis (int)\\\\npipelineName (str)\\\\nplatformInstanceId (str)\\\\nrunId (str)\\\\nrunStatus (Union[str, JobStatusClass])\\\\neventGranularity (Optional[TimeWindowSizeClass])\\\\npartitionSpec (Optional[PartitionSpecClass])\\\\nmessageId (Optional[str])\\\\nnumWorkUnitsCommitted (Optional[int])\\\\nnumWorkUnitsCreated (Optional[int])\\\\nnumEvents (Optional[int])\\\\nnumEntities (Optional[int])\\\\nnumAspects (Optional[int])\\\\nnumSourceAPICalls (Optional[int])\\\\ntotalLatencySourceAPICalls (Optional[int])\\\\nnumSinkAPICalls (Optional[int])\\\\ntotalLatencySinkAPICalls (Optional[int])\\\\nnumWarnings (Optional[int])\\\\nnumErrors (Optional[int])\\\\nnumEntitiesSkipped (Optional[int])\\\\nconfig (Optional[str])\\\\ncustomsummary (Optional[str])\\\\nsoftwareVersion (Optional[str])\\\\nsystemHostName (Optional[str])\\\\noperatingSystemName (Optional[str])\\\\nnumProcessors (Optional[int])\\\\ntotalMemory (Optional[int])\\\\navailableMemory (Optional[int])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nASPECTTYPE None | int\\\\nThe available memory on the host the ingestion pipeline ran on.\\\\n\\\\n\\\\n\\\\nproperty config None | str\\\\nCustom value.\\\\n\\\\n\\\\n\\\\nproperty eventGranularity None | str\\\\nThe optional messageId, if provided serves as a custom user-defined unique identifier for an aspect value.\\\\n\\\\n\\\\n\\\\nproperty numAspects None | int\\\\nThe total number of entities produced (unique entity urns).\\\\n\\\\n\\\\n\\\\nproperty numEntitiesSkipped None | int\\\\nNumber of errors generated.\\\\n\\\\n\\\\n\\\\nproperty numEvents None | int\\\\nThe number of processors on the host the ingestion pipeline ran on.\\\\n\\\\n\\\\n\\\\nproperty numSinkAPICalls None | int\\\\nTotal number of source API calls.\\\\n\\\\n\\\\n\\\\nproperty numWarnings None | int\\\\nThe number of workunits written to sink.\\\\n\\\\n\\\\n\\\\nproperty numWorkUnitsCreated None | str\\\\nThe os the ingestion pipeline ran on.\\\\n\\\\n\\\\n\\\\nproperty partitionSpec str\\\\nThe name of the pipeline that ran ingestion, a stable unique user provided identifier.\\\\ne.g. mysnowflake1-to-datahub.\\\\n\\\\n\\\\n\\\\nproperty platformInstanceId Bigquery project ids, MySQL hostnames etc.\\\\n\\\\n\\\\n\\\\nproperty runId str | JobStatusClass\\\\nRun Status - Succeeded/Skipped/Failed etc.\\\\n\\\\n\\\\n\\\\nproperty softwareVersion None | str\\\\nThe hostname the ingestion pipeline ran on.\\\\n\\\\n\\\\n\\\\nproperty timestampMillis None | int\\\\nTotal latency across all sink API calls.\\\\n\\\\n\\\\n\\\\nproperty totalLatencySourceAPICalls None | int\\\\nThe total amount of memory on the host the ingestion pipeline ran on.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatasetAssertionInfoClass(dataset, scope, operator, fields=None, aggregation=None, parameters=None, nativeType=None, nativeParameters=None, logic=None)\\\\nBases\\\\n\\\\ndataset (str)\\\\nscope (Union[str, DatasetAssertionScopeClass])\\\\noperator (Union[str, AssertionStdOperatorClass])\\\\nfields (Optional[List[str]])\\\\naggregation (Union[None, str, AssertionStdAggregationClass])\\\\nparameters (Optional[AssertionStdParametersClass])\\\\nnativeType (Optional[str])\\\\nnativeParameters (Optional[Dict[str, str]])\\\\nlogic (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aggregation str\\\\nThe dataset targeted by this assertion.\\\\n\\\\n\\\\n\\\\nproperty fields None | str\\\\n\\\\n\\\\n\\\\nproperty nativeParameters None | str\\\\nNative assertion type\\\\n\\\\n\\\\n\\\\nproperty operator None | AssertionStdParametersClass\\\\nStandard parameters required for the assertion. e.g. minvalue, maxvalue, value, columns\\\\n\\\\n\\\\n\\\\nproperty scope object\\\\n\\\\n\\\\nDATASETCOLUMN = \'DATASETCOLUMN\'\\\\n\\\\n\\\\n\\\\nDATASETROWS = \'DATASETROWS\'\\\\n\\\\n\\\\n\\\\nDATASETSCHEMA = \'DATASETSCHEMA\'\\\\n\\\\n\\\\n\\\\nDATASETSTORAGESIZE = \'DATASETSTORAGESIZE\'\\\\n\\\\n\\\\n\\\\nUNKNOWN = \'UNKNOWN\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatasetDeprecationClass(deprecated, note, decommissionTime=None, actor=None)\\\\nBases\\\\n\\\\ndeprecated (bool)\\\\nnote (str)\\\\ndecommissionTime (Optional[int])\\\\nactor (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty actor None | int\\\\nThe time user plan to decommission this dataset.\\\\n\\\\n\\\\n\\\\nproperty deprecated str\\\\nAdditional information about the dataset deprecation plan, such as the wiki, doc, RB.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatasetFieldForeignKeyClass(parentDataset, currentFieldPaths, parentField)\\\\nBases\\\\n\\\\nparentDataset (str)\\\\ncurrentFieldPaths (List[str])\\\\nparentField (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty currentFieldPaths str\\\\ndataset that stores the resource.\\\\n\\\\n\\\\n\\\\nproperty parentField DictWrapper\\\\nRepresentation of mapping between fields in source dataset to the field in destination dataset\\\\n\\\\nParameters AuditStampClass\\\\nAudit stamp containing who reported the field mapping and when\\\\n\\\\n\\\\n\\\\nproperty destinationField List[str]\\\\nSource fields from which the fine grained lineage is derived\\\\n\\\\n\\\\n\\\\nproperty transformation DictWrapper\\\\nStats corresponding to fields in a dataset\\\\n\\\\nParameters None | List[ValueFrequencyClass]\\\\n\\\\n\\\\n\\\\nproperty fieldPath None | HistogramClass\\\\n\\\\n\\\\n\\\\nproperty max None | str\\\\n\\\\n\\\\n\\\\nproperty median None | str\\\\n\\\\n\\\\n\\\\nproperty nullCount None | float\\\\n\\\\n\\\\n\\\\nproperty quantiles None | List[str]\\\\n\\\\n\\\\n\\\\nproperty stdev None | int\\\\n\\\\n\\\\n\\\\nproperty uniqueProportion DictWrapper\\\\nRecords field-level usage counts for a given dataset\\\\n\\\\nParameters int\\\\nNumber of times the field has been used.\\\\n\\\\n\\\\n\\\\nproperty fieldPath DictWrapper\\\\nA definition of filters that should be used when\\\\nquerying an external Dataset or Table.\\\\nNote that this models should NOT be used for working with\\\\nsearch / filter on DataHub Platform itself.\\\\n\\\\nParameters None | str\\\\nThe raw where clause string which will be used for monitoring.\\\\nRequired if the type is SQL.\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nSQL = \'SQL\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatasetKeyClass(platform, name, origin)\\\\nBases\\\\n\\\\nplatform (str)\\\\nname (str)\\\\norigin (Union[str, FabricTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name str | FabricTypeClass\\\\nFabric type where dataset belongs to or where it was generated.\\\\n\\\\n\\\\n\\\\nproperty platform object\\\\nThe various types of supported dataset lineage\\\\n\\\\n\\\\nCOPY = \'COPY\'\\\\n\\\\n\\\\n\\\\nTRANSFORMED = \'TRANSFORMED\'\\\\n\\\\n\\\\n\\\\nVIEW = \'VIEW\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatasetProfileClass(timestampMillis, eventGranularity=None, partitionSpec=None, messageId=None, rowCount=None, columnCount=None, fieldProfiles=None, sizeInBytes=None)\\\\nBases\\\\n\\\\ntimestampMillis (int)\\\\neventGranularity (Optional[TimeWindowSizeClass])\\\\npartitionSpec (Optional[PartitionSpecClass])\\\\nmessageId (Optional[str])\\\\nrowCount (Optional[int])\\\\ncolumnCount (Optional[int])\\\\nfieldProfiles (Optional[List[DatasetFieldProfileClass]])\\\\nsizeInBytes (Optional[int])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nASPECTTYPE None | int\\\\nThe total number of columns (or schema fields)\\\\n\\\\n\\\\n\\\\nproperty eventGranularity None | List[DatasetFieldProfileClass]\\\\nProfiles for each column (or schema field)\\\\n\\\\n\\\\n\\\\nproperty messageId PartitionSpecClass | None\\\\nThe optional partition specification.\\\\n\\\\n\\\\n\\\\nproperty rowCount None | int\\\\nStorage size in bytes\\\\n\\\\n\\\\n\\\\nproperty timestampMillis Aspect\\\\nProperties associated with a Dataset\\\\n\\\\nParameters None | TimeStampClass\\\\nA timestamp documenting when the asset was created in the source Data Platform (not on DataHub)\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDocumentation of the dataset\\\\n\\\\n\\\\n\\\\nproperty externalUrl None | TimeStampClass\\\\nA timestamp documenting when the asset was last modified in the source Data Platform (not on DataHub)\\\\n\\\\n\\\\n\\\\nproperty name None | str\\\\nFully-qualified name of the Dataset\\\\n\\\\n\\\\n\\\\nproperty tags None | str\\\\n///dir/filename. Uri should not include any environment specific properties. Some datasets might not have a standardized uri, which makes this field optional (i.e. kafka topic).\\\\n\\\\nType\\\\n///data/tracking/PageViewEvent, file\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatasetSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[DatasetKeyClass, DatasetPropertiesClass, EditableDatasetPropertiesClass, DatasetDeprecationClass, DatasetUpstreamLineageClass, UpstreamLineageClass, InstitutionalMemoryClass, OwnershipClass, StatusClass, SchemaMetadataClass, EditableSchemaMetadataClass, GlobalTagsClass, GlossaryTermsClass, BrowsePathsClass, DataPlatformInstanceClass, ViewPropertiesClass, BrowsePathsV2Class]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DatasetUpstreamLineageClass(fieldMappings)\\\\nBases\\\\nfieldMappings (List[DatasetFieldMappingClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty fieldMappings Aspect\\\\nStats corresponding to dataset\\\\u2019s usage.\\\\n\\\\nParameters ClassVar[str] = \'timeseries\'\\\\n\\\\n\\\\n\\\\nproperty eventGranularity None | List[DatasetFieldUsageCountsClass]\\\\nField-level usage stats\\\\n\\\\n\\\\n\\\\nproperty messageId PartitionSpecClass | None\\\\nThe optional partition specification.\\\\n\\\\n\\\\n\\\\nproperty timestampMillis None | List[str]\\\\nFrequent SQL queries; mostly makes sense for datasets in SQL databases\\\\n\\\\n\\\\n\\\\nproperty totalSqlQueries None | int\\\\nUnique user count\\\\n\\\\n\\\\n\\\\nproperty userCounts DictWrapper\\\\nRecords a single user\\\\u2019s usage counts for a given resource\\\\n\\\\nParameters int\\\\nNumber of times the dataset has been used by the user.\\\\n\\\\n\\\\n\\\\nproperty user None | str\\\\nIf useremail is set, we attempt to resolve the user\\\\u2019s urn upon ingest\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DateTypeClass\\\\nBases object\\\\nModel endpoint statuses\\\\n\\\\n\\\\nCREATING = \'CREATING\'\\\\n\\\\n\\\\n\\\\nDELETING = \'DELETING\'\\\\n\\\\n\\\\n\\\\nFAILED = \'FAILED\'\\\\n\\\\n\\\\n\\\\nINSERVICE = \'INSERVICE\'\\\\n\\\\n\\\\n\\\\nOUTOFSERVICE = \'OUTOFSERVICE\'\\\\n\\\\n\\\\n\\\\nROLLINGBACK = \'ROLLINGBACK\'\\\\n\\\\n\\\\n\\\\nUNKNOWN = \'UNKNOWN\'\\\\n\\\\n\\\\n\\\\nUPDATING = \'UPDATING\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DeprecationClass(deprecated, note, actor, decommissionTime=None)\\\\nBases\\\\n\\\\ndeprecated (bool)\\\\nnote (str)\\\\nactor (str)\\\\ndecommissionTime (Optional[int])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty actor None | int\\\\nThe time user plan to decommission this entity.\\\\n\\\\n\\\\n\\\\nproperty deprecated str\\\\nAdditional information about the entity deprecation plan, such as the wiki, doc, RB.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DomainKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id Aspect\\\\nInformation about a Domain\\\\n\\\\nParameters None | AuditStampClass\\\\nCreated Audit stamp\\\\n\\\\n\\\\n\\\\nproperty description str\\\\nDisplay name of the Domain\\\\n\\\\n\\\\n\\\\nproperty parentDomain\\\\nOptional\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.DomainsClass(domains)\\\\nBases\\\\ndomains (List[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty domains DictWrapper\\\\nA common structure to represent all edges to entities when used inside aspects as collections\\\\nThis ensures that all edges have common structure around audit-stamps and will support PATCH, time-travel automatically.\\\\n\\\\nParameters None | AuditStampClass\\\\nAudit stamp containing who created this relationship edge and when\\\\n\\\\n\\\\n\\\\nproperty destinationUrn None | AuditStampClass\\\\nAudit stamp containing who last modified this relationship edge and when\\\\n\\\\n\\\\n\\\\nproperty properties None | str\\\\nUrn of the source of this relationship edge.\\\\nIf not specified, assumed to be the entity that this aspect belongs to.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EditableChartPropertiesClass(created=None, lastModified=None, deleted=None, description=None)\\\\nBases\\\\n\\\\ncreated (Optional[AuditStampClass])\\\\nlastModified (Optional[AuditStampClass])\\\\ndeleted (Optional[AuditStampClass])\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty created None | AuditStampClass\\\\nAn AuditStamp corresponding to the deletion of this resource/association/sub-resource. Logically, deleted MUST have a later timestamp than creation. It may or may not have the same time as lastModified depending upon the resource/association/sub-resource semantics.\\\\n\\\\n\\\\n\\\\nproperty description AuditStampClass\\\\nAn AuditStamp corresponding to the last modification of this resource/association/sub-resource. If no modification has happened since creation, lastModified should be the same as created. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EditableContainerPropertiesClass(description=None)\\\\nBases\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty description Aspect\\\\nStores editable changes made to properties. This separates changes made from\\\\ningestion pipelines and edits in the UI to avoid accidental overwrites of user-provided data by ingestion pipelines\\\\n\\\\nParameters AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty deleted None | str\\\\nEdited documentation of the dashboard\\\\n\\\\n\\\\n\\\\nproperty lastModified Aspect\\\\nStores editable changes made to properties. This separates changes made from\\\\ningestion pipelines and edits in the UI to avoid accidental overwrites of user-provided data by ingestion pipelines\\\\n\\\\nParameters AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty deleted None | str\\\\nEdited documentation of the data flow\\\\n\\\\n\\\\n\\\\nproperty lastModified Aspect\\\\nStores editable changes made to properties. This separates changes made from\\\\ningestion pipelines and edits in the UI to avoid accidental overwrites of user-provided data by ingestion pipelines\\\\n\\\\nParameters AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty deleted None | str\\\\nEdited documentation of the data job\\\\n\\\\n\\\\n\\\\nproperty lastModified Aspect\\\\nEditableDatasetProperties stores editable changes made to dataset properties. This separates changes made from\\\\ningestion pipelines and edits in the UI to avoid accidental overwrites of user-provided data by ingestion pipelines\\\\n\\\\nParameters AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty deleted None | str\\\\nDocumentation of the dataset\\\\n\\\\n\\\\n\\\\nproperty lastModified Aspect\\\\nProperties associated with a MLFeature editable from the UI\\\\n\\\\nParameters None | str\\\\nDocumentation of the MLFeature\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EditableMLFeatureTablePropertiesClass(description=None)\\\\nBases\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty description Aspect\\\\nProperties associated with an ML Model Group editable from the UI\\\\n\\\\nParameters None | str\\\\nDocumentation of the ml model group\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EditableMLModelPropertiesClass(description=None)\\\\nBases\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty description Aspect\\\\nProperties associated with a MLPrimaryKey editable from the UI\\\\n\\\\nParameters None | str\\\\nDocumentation of the MLPrimaryKey\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EditableNotebookPropertiesClass(created=None, lastModified=None, deleted=None, description=None)\\\\nBases This is IN BETA version\\\\n\\\\nParameters AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty deleted None | str\\\\nEdited documentation of the Notebook\\\\n\\\\n\\\\n\\\\nproperty lastModified DictWrapper\\\\nSchemaField to describe metadata related to dataset schema.\\\\n\\\\nParameters None | str\\\\nDescription\\\\n\\\\n\\\\n\\\\nproperty fieldPath None | GlobalTagsClass\\\\nTags associated with the field\\\\n\\\\n\\\\n\\\\nproperty glossaryTerms Aspect\\\\nEditableSchemaMetadata stores editable changes made to schema metadata. This separates changes made from\\\\ningestion pipelines and edits in the UI to avoid accidental overwrites of user-provided data by ingestion pipelines.\\\\n\\\\nParameters AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty deleted List[EditableSchemaFieldInfoClass]\\\\nClient provided a list of fields from document schema.\\\\n\\\\n\\\\n\\\\nproperty lastModified Aspect\\\\nInformation regarding rendering an embed for an asset.\\\\n\\\\nParameters None | str\\\\nAn embed URL to be rendered inside of an iframe.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EntityChangeEventClass(entityType, entityUrn, category, operation, auditStamp, version, modifier=None, parameters=None)\\\\nBases\\\\n\\\\nentityType (str)\\\\nentityUrn (str)\\\\ncategory (str)\\\\noperation (str)\\\\nauditStamp (AuditStampClass)\\\\nversion (int)\\\\nmodifier (Optional[str])\\\\nparameters (Optional[ParametersClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty auditStamp str\\\\nThe category type (TAG, GLOSSARYTERM, OWNERSHIP, TECHNICALSCHEMA, etc). This is used to determine what the rest of the schema will look like.\\\\n\\\\n\\\\n\\\\nproperty entityType str\\\\nThe urn of the entity which was affected.\\\\n\\\\n\\\\n\\\\nproperty modifier str\\\\nThe operation type. This is used to determine what the rest of the schema will look like.\\\\n\\\\n\\\\n\\\\nproperty parameters int\\\\nThe version of the event type, incremented in integers.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EnumTypeClass\\\\nBases DictWrapper\\\\nSchema text of an espresso table schema.\\\\n\\\\nParameters str\\\\nThe native espresso document schema.\\\\n\\\\n\\\\n\\\\nproperty tableSchema Aspect\\\\nThis section is intended to demonstrate the ethical considerations that went into MLModel development, surfacing ethical challenges and solutions to stakeholders.\\\\n\\\\nParameters None | List[str]\\\\nDoes the MLModel use any sensitive data (e.g., protected classes)?\\\\n\\\\n\\\\n\\\\nproperty humanLife None | List[str]\\\\nWhat risk mitigation strategies were used during MLModel development?\\\\n\\\\n\\\\n\\\\nproperty risksAndHarms None | List[str]\\\\nAre there any known MLModel use cases that are especially fraught? This may connect directly to the intended use section\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.EvaluationDataClass(evaluationData)\\\\nBases\\\\nevaluationData (List[BaseDataClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty evaluationData Aspect\\\\nAn request to execution some remote logic or action.\\\\nTODO\\\\n\\\\ntask (str)\\\\nargs (Dict[str, str])\\\\nexecutorId (str)\\\\nsource (ExecutionRequestSourceClass)\\\\nrequestedAt (int)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty args str\\\\nspecify a specific executor to route the request to. If none is provided, a \\\\u201cdefault\\\\u201d executor is used.\\\\n\\\\nType int\\\\nTime at which the execution request input was created\\\\n\\\\n\\\\n\\\\nproperty source str\\\\nThe name of the task to execute, for example RUNINGEST\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ExecutionRequestKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id Aspect\\\\nThe result of an execution request\\\\n\\\\nParameters None | int\\\\nDuration in milliseconds\\\\n\\\\n\\\\n\\\\nproperty report None | int\\\\nTime at which the request was created\\\\n\\\\n\\\\n\\\\nproperty status None | StructuredExecutionReportClass\\\\nA structured report if available.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ExecutionRequestSignalClass(signal, createdAt, executorId=None)\\\\nBases\\\\n\\\\nsignal (str)\\\\ncreatedAt (AuditStampClass)\\\\nexecutorId (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty createdAt None | str\\\\nspecify a specific executor to route the request to. If none is provided, a \\\\u201cdefault\\\\u201d executor is used.\\\\n\\\\nType str\\\\nThe signal to issue, e.g. KILL\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ExecutionRequestSourceClass(type, ingestionSource=None)\\\\nBases\\\\n\\\\ntype (str)\\\\ningestionSource (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty ingestionSource str\\\\nThe type of the execution request source, e.g. INGESTIONSOURCE\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.FabricTypeClass\\\\nBases DictWrapper\\\\nRecords field-level usage counts for a given resource\\\\n\\\\nParameters int\\\\n\\\\n\\\\n\\\\nproperty fieldName DictWrapper\\\\nThe filter for finding a record or a collection of records\\\\n\\\\nParameters None | List[CriterionClass]\\\\nDeprecated! A list of conjunctive criterion for the filter. If \\\\u201cor\\\\u201d field is provided, then this field is ignored.\\\\n\\\\n\\\\n\\\\nproperty or DictWrapper\\\\nA fine-grained lineage from upstream fields/datasets to downstream field(s)\\\\n\\\\nParameters float\\\\nThe confidence in this lineage between 0 (low confidence) and 1 (high confidence)\\\\n\\\\n\\\\n\\\\nproperty downstreamType None | List[str]\\\\nDownstream fields in the lineage\\\\n\\\\n\\\\n\\\\nproperty transformOperation str | FineGrainedLineageUpstreamTypeClass\\\\nThe type of upstream entity\\\\n\\\\n\\\\n\\\\nproperty upstreams object\\\\nThe type of downstream field(s) in a fine-grained lineage\\\\n\\\\n\\\\nFIELD = \'FIELD\'\\\\n\\\\n\\\\n\\\\nFIELDSET = \'FIELDSET\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.FineGrainedLineageUpstreamTypeClass\\\\nBases DictWrapper\\\\nAttributes defining a relative fixed interval SLA schedule.\\\\n\\\\nParameters int\\\\nHow many units. Defaults to 1.\\\\n\\\\n\\\\n\\\\nproperty unit DictWrapper\\\\nFixed field type.\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ForeignKeyConstraintClass(name, foreignFields, sourceFields, foreignDataset)\\\\nBases\\\\n\\\\nname (str)\\\\nforeignFields (List[str])\\\\nsourceFields (List[str])\\\\nforeignDataset (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty foreignDataset List[str]\\\\nFields the constraint maps to on the foreign dataset\\\\n\\\\n\\\\n\\\\nproperty name List[str]\\\\nFields the constraint maps to on the source dataset\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.ForeignKeySpecClass(foreignKey)\\\\nBases\\\\nforeignKey (Union[DatasetFieldForeignKeyClass, UrnForeignKeyClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty foreignKey DictWrapper\\\\nAttributes defining a Freshness Assertion.\\\\n\\\\nParameters str\\\\nThe entity targeted by this Freshness check.\\\\n\\\\n\\\\n\\\\nproperty filter FreshnessAssertionScheduleClass\\\\nProduce FAILURE Assertion Result if the asset is not updated on the cadence and within the time range described by the schedule.\\\\n\\\\n\\\\n\\\\nproperty type DictWrapper\\\\nAttributes defining a single Freshness schedule.\\\\n\\\\nParameters None | FreshnessCronScheduleClass\\\\nA cron schedule. This field is required when type is CRON.\\\\n\\\\n\\\\n\\\\nproperty fixedInterval str | FreshnessAssertionScheduleTypeClass\\\\nThe type of a Freshness Assertion Schedule.\\\\nOnce we support data-time-relative schedules (e.g. schedules relative to time partitions),\\\\nwe will add those schedule types here.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.FreshnessAssertionScheduleTypeClass\\\\nBases object\\\\n\\\\n\\\\nDATASETCHANGE = \'DATASETCHANGE\'\\\\n\\\\n\\\\n\\\\nDATAJOBRUN = \'DATAJOBRUN\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.FreshnessContractClass(assertion)\\\\nBases\\\\nassertion (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty assertion DictWrapper\\\\nAttributes defining a CRON-formatted schedule used for defining a freshness assertion.\\\\n\\\\nParameters str\\\\nA cron-formatted execution interval, as a cron string, e.g. 1 * * * *\\\\n\\\\n\\\\n\\\\nproperty timezone None | int\\\\nAn optional offset in milliseconds to SUBTRACT from the timestamp generated by the cron schedule\\\\nto generate the lower bounds of the \\\\u201cfreshness window\\\\u201d, or the window of time in which an event must have occurred in order for the Freshness check\\\\nto be considering passing.\\\\nIf left empty, the start of the SLA window will be the end of the previously evaluated Freshness window.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GenericAspectClass(value, contentType)\\\\nBases\\\\n\\\\nvalue (bytes)\\\\ncontentType (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty contentType bytes\\\\nThe value of the aspect, serialized as bytes.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GenericPayloadClass(value, contentType)\\\\nBases\\\\n\\\\nvalue (bytes)\\\\ncontentType (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty contentType bytes\\\\nThe value of the event, serialized as bytes.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GlobalSettingsInfoClass(views=None)\\\\nBases\\\\nviews (Optional[GlobalViewsSettingsClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty views Aspect\\\\nKey for a Global Settings\\\\n\\\\nParameters str\\\\nli0\\\\n\\\\nType\\\\nurn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GlobalTagsClass(tags)\\\\nBases\\\\ntags (List[TagAssociationClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty tags DictWrapper\\\\nSettings for DataHub Views feature.\\\\n\\\\nParameters None | str\\\\nThe default View for the instance, or organization.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GlossaryNodeInfoClass(definition, parentNode=None, name=None, id=None)\\\\nBases\\\\n\\\\ndefinition (str)\\\\nparentNode (Optional[str])\\\\nname (Optional[str])\\\\nid (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty definition None | str\\\\nOptional id for the GlossaryNode\\\\n\\\\n\\\\n\\\\nproperty name None | str\\\\nParent node of the glossary term\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GlossaryNodeKeyClass(name)\\\\nBases\\\\nname (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty name DictWrapper\\\\nA metadata snapshot for a specific GlossaryNode entity.\\\\n\\\\nParameters List[GlossaryNodeKeyClass | GlossaryNodeInfoClass | OwnershipClass | StatusClass]\\\\nThe list of metadata aspects associated with the GlossaryNode. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nHas A / Is A lineage information about a glossary Term reporting the lineage\\\\n\\\\nParameters None | List[str]\\\\nThe relationship Has A with glossary term\\\\n\\\\n\\\\n\\\\nproperty isRelatedTerms None | List[str]\\\\nThe relationship isRelatedTo with glossary term\\\\n\\\\n\\\\n\\\\nproperty values DictWrapper\\\\nProperties of an applied glossary term.\\\\n\\\\nParameters None | str\\\\nAdditional context about the association\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nProperties associated with a GlossaryTerm\\\\n\\\\nParameters Dict[str, str]\\\\nCustom property bag.\\\\n\\\\n\\\\n\\\\nproperty definition None | str\\\\nOptional id for the term\\\\n\\\\n\\\\n\\\\nproperty name None | str\\\\nParent node of the glossary term\\\\n\\\\n\\\\n\\\\nproperty rawSchema None | str\\\\nExternal Reference to the business-term\\\\n\\\\n\\\\n\\\\nproperty sourceUrl\\\\nThe abstracted URL such as https\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty termSource Aspect\\\\nKey for a GlossaryTerm\\\\n\\\\nParameters str\\\\nThe term name, which serves as a unique id\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GlossaryTermSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[GlossaryTermKeyClass, GlossaryTermInfoClass, OwnershipClass, StatusClass, BrowsePathsClass, GlossaryRelatedTermsClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GlossaryTermsClass(terms, auditStamp)\\\\nBases\\\\n\\\\nterms (List[GlossaryTermAssociationClass])\\\\nauditStamp (AuditStampClass)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty auditStamp List[GlossaryTermAssociationClass]\\\\nThe related business terms\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.GroupMembershipClass(groups)\\\\nBases\\\\ngroups (List[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty groups DictWrapper\\\\n\\\\nParameters List[str]\\\\n\\\\n\\\\n\\\\nproperty heights DictWrapper\\\\nThe definition of the transformer function  that should be applied to a given field / column value in a dataset\\\\nin order to determine the segment or bucket that it belongs to, which in turn is used to evaluate\\\\nvolume assertions.\\\\n\\\\nParameters None | str\\\\nThe \\\\u2018native\\\\u2019 transformer type, useful as a back door if a custom operator is required.\\\\nThis field is required if the type is NATIVE.\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nCEILING = \'CEILING\'\\\\n\\\\n\\\\n\\\\nFLOOR = \'FLOOR\'\\\\n\\\\n\\\\n\\\\nNATIVE = \'NATIVE\'\\\\n\\\\n\\\\n\\\\nTIMESTAMPMSTODATE = \'TIMESTAMPMSTODATE\'\\\\n\\\\n\\\\n\\\\nTIMESTAMPMSTOHOUR = \'TIMESTAMPMSTOHOUR\'\\\\n\\\\n\\\\n\\\\nTIMESTAMPMSTOMINUTE = \'TIMESTAMPMSTOMINUTE\'\\\\n\\\\n\\\\n\\\\nTIMESTAMPMSTOMONTH = \'TIMESTAMPMSTOMONTH\'\\\\n\\\\n\\\\n\\\\nTIMESTAMPMSTOYEAR = \'TIMESTAMPMSTOYEAR\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.IncrementingSegmentRowCountChangeClass(segment, type, operator, parameters)\\\\nBases\\\\n\\\\nsegment (IncrementingSegmentSpecClass)\\\\ntype (Union[str, AssertionValueChangeTypeClass])\\\\noperator (Union[str, AssertionStdOperatorClass])\\\\nparameters (AssertionStdParametersClass)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty operator\\\\nGREATERTHAN, GREATERTHANOREQUALTO, EQUALTO, LESSTHAN, LESSTHANOREQUALTO,\\\\nBETWEEN.\\\\n\\\\n\\\\n\\\\nproperty parameters NUMBER.\\\\n\\\\n\\\\n\\\\nproperty segment str | AssertionValueChangeTypeClass\\\\na fixed absolute value or a relative percentage.\\\\n\\\\nType DictWrapper\\\\nAttributes defining an INCREMENTINGSEGMENTROWCOUNTTOTAL volume assertion.\\\\n\\\\nParameters str | AssertionStdOperatorClass\\\\nThe operator you\\\\u2019d like to apply.\\\\nNote that only numeric operators are valid inputs AssertionStdParametersClass\\\\nThe parameters you\\\\u2019d like to provide as input to the operator.\\\\nNote that only numeric parameter types are valid inputs IncrementingSegmentSpecClass\\\\nA specification of how the \\\\u2018segment\\\\u2019 can be derived using a column and an optional transformer function.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.IncrementingSegmentSpecClass(field, transformer=None)\\\\nBases\\\\n\\\\n\\\\nA field or column that represents the incrementing value. New rows that are inserted will be identified using this column.\\\\nNote that the value of this column may not by itself represent the \\\\u201cbucket\\\\u201d or the \\\\u201csegment\\\\u201d in which the row falls.\\\\n[Optional] An transformer function that may be applied to the selected column value in order\\\\nto obtain the final \\\\u201csegment identifier\\\\u201d or \\\\u201cbucket identifier\\\\u201d. Rows that have the same value after applying the transformation\\\\nwill be grouped into the same segment, using which the final value (e.g. row count) will be determined.\\\\n\\\\n\\\\n\\\\nParameters SchemaFieldSpecClass\\\\nThe field to use to generate segments. It must be constantly incrementing as new rows are inserted.\\\\n\\\\n\\\\n\\\\nproperty transformer DictWrapper\\\\nThe checkpoint state object of a datahub ingestion run for a given job.\\\\n\\\\nParameters str\\\\nThe version of the state format.\\\\n\\\\n\\\\n\\\\nproperty payload str\\\\nThe serialization/deserialization protocol.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.InputFieldClass(schemaFieldUrn, schemaField=None)\\\\nBases\\\\n\\\\nschemaFieldUrn (str)\\\\nschemaField (Optional[SchemaFieldClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty schemaField str\\\\nUrn of the schema being referenced for lineage purposes\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.InputFieldsClass(fields)\\\\nBases\\\\nfields (List[InputFieldClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty fields Aspect\\\\nInstitutional memory of an entity. This is a way to link to relevant documentation and provide description of the documentation. Institutional or tribal knowledge is very important for users to leverage the entity.\\\\n\\\\nParameters List[InstitutionalMemoryMetadataClass]\\\\nList of records that represent institutional memory of an entity. Each record consists of a link, description, creator and timestamps associated with that record.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.InstitutionalMemoryMetadataClass(url, description, createStamp)\\\\nBases\\\\n\\\\nurl (str)\\\\ndescription (str)\\\\ncreateStamp (AuditStampClass)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty createStamp str\\\\nDescription of the link.\\\\n\\\\n\\\\n\\\\nproperty url Aspect\\\\nIntended Use for the ML Model\\\\n\\\\nParameters None | List[str]\\\\nHighlight technology that the MLModel might easily be confused with, or related contexts that users could try to apply the MLModel to.\\\\n\\\\n\\\\n\\\\nproperty primaryUsers None | List[str]\\\\nPrimary Use cases for the MLModel.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.IntendedUserTypeClass\\\\nBases Aspect\\\\nAspect used to store invite tokens.\\\\n\\\\nParameters None | str\\\\nThe role that this invite token may be associated with\\\\n\\\\n\\\\n\\\\nproperty token Aspect\\\\nKey for an InviteToken.\\\\n\\\\nParameters str\\\\nA unique id for the invite token.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.JobStatusClass\\\\nBases DictWrapper\\\\nThis header records information about the context of an event as it is emitted into kafka and is intended to be used by the kafka audit application.  For more information see go/kafkaauditheader\\\\n\\\\nParameters str\\\\nThe name of the application from which the event is being emitted. see go/appname\\\\n\\\\n\\\\n\\\\nproperty auditVersion\\\\nThe version that is being used for auditing. In version 0, the audit trail buckets events into 10 minute audit windows based on the EventHeader timestamp. In version 1, the audit trail buckets events as follows\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty clusterConnectionString None | str\\\\nfabric\\\\nThe fabricUrn of the host from which the event is being emitted. Fabric Urn in the format of urn\\\\n\\\\nType None | str\\\\nThe instance on the server from which the event is being emitted. e.g. i001\\\\n\\\\n\\\\n\\\\nproperty messageId str\\\\nThe fully qualified name of the host from which the event is being emitted.\\\\n\\\\n\\\\n\\\\nproperty time DictWrapper\\\\nSchema holder for kafka schema.\\\\n\\\\nParameters str\\\\nThe native kafka document schema. This is a human readable avro document schema.\\\\n\\\\n\\\\n\\\\nproperty documentSchemaType None | str\\\\nThe native kafka key schema as retrieved from Schema Registry\\\\n\\\\n\\\\n\\\\nproperty keySchemaType DictWrapper\\\\nSchema text of a key-value store schema.\\\\n\\\\nParameters str\\\\nThe raw schema for the key in the key-value store.\\\\n\\\\n\\\\n\\\\nproperty valueSchema object\\\\nMLFeature Data Type\\\\n\\\\n\\\\nAUDIO = \'AUDIO\'\\\\n\\\\n\\\\n\\\\nBINARY = \'BINARY\'\\\\n\\\\n\\\\n\\\\nBYTE = \'BYTE\'\\\\n\\\\n\\\\n\\\\nCONTINUOUS = \'CONTINUOUS\'\\\\n\\\\n\\\\n\\\\nCOUNT = \'COUNT\'\\\\n\\\\n\\\\n\\\\nIMAGE = \'IMAGE\'\\\\n\\\\n\\\\n\\\\nINTERVAL = \'INTERVAL\'\\\\n\\\\n\\\\n\\\\nMAP = \'MAP\'\\\\n\\\\n\\\\n\\\\nNOMINAL = \'NOMINAL\'\\\\n\\\\n\\\\n\\\\nORDINAL = \'ORDINAL\'\\\\n\\\\n\\\\n\\\\nSEQUENCE = \'SEQUENCE\'\\\\n\\\\n\\\\n\\\\nSET = \'SET\'\\\\n\\\\n\\\\n\\\\nTEXT = \'TEXT\'\\\\n\\\\n\\\\n\\\\nTIME = \'TIME\'\\\\n\\\\n\\\\n\\\\nUNKNOWN = \'UNKNOWN\'\\\\n\\\\n\\\\n\\\\nUSELESS = \'USELESS\'\\\\n\\\\n\\\\n\\\\nVIDEO = \'VIDEO\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLFeatureKeyClass(featureNamespace, name)\\\\nBases\\\\n\\\\nfeatureNamespace (str)\\\\nname (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty featureNamespace str\\\\nName of the feature\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLFeaturePropertiesClass(description=None, dataType=None, version=None, sources=None)\\\\nBases\\\\n\\\\ndescription (Optional[str])\\\\ndataType (Union[None, str, MLFeatureDataTypeClass])\\\\nversion (Optional[VersionTagClass])\\\\nsources (Optional[List[str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty dataType None | str\\\\nDocumentation of the MLFeature\\\\n\\\\n\\\\n\\\\nproperty sources None | VersionTagClass\\\\nVersion of the MLFeature\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLFeatureSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[MLFeatureKeyClass, MLFeaturePropertiesClass, OwnershipClass, InstitutionalMemoryClass, StatusClass, DeprecationClass, BrowsePathsClass, GlobalTagsClass, DataPlatformInstanceClass, BrowsePathsV2Class]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLFeatureTableKeyClass(platform, name)\\\\nBases\\\\n\\\\nplatform (str)\\\\nname (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name str\\\\nData platform urn associated with the feature table\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLFeatureTablePropertiesClass(customProperties=None, description=None, mlFeatures=None, mlPrimaryKeys=None)\\\\nBases\\\\n\\\\ncustomProperties (Optional[Dict[str, str]])\\\\ndescription (Optional[str])\\\\nmlFeatures (Optional[List[str]])\\\\nmlPrimaryKeys (Optional[List[str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDocumentation of the MLFeatureTable\\\\n\\\\n\\\\n\\\\nproperty mlFeatures None | List[str]\\\\nList of primary keys in the feature table (if multiple, assumed to act as a composite key)\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLFeatureTableSnapshotClass(urn, aspects)\\\\nBases\\\\n\\\\nurn (str)\\\\naspects (List[Union[MLFeatureTableKeyClass, MLFeatureTablePropertiesClass, OwnershipClass, InstitutionalMemoryClass, StatusClass, DeprecationClass, BrowsePathsClass, GlobalTagsClass, DataPlatformInstanceClass, BrowsePathsV2Class]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspects str\\\\nURN for the entity the metadata snapshot is associated with.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLHyperParamClass(name, description=None, value=None, createdAt=None)\\\\nBases\\\\n\\\\nname (str)\\\\ndescription (Optional[str])\\\\nvalue (Optional[str])\\\\ncreatedAt (Optional[int])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty createdAt None | str\\\\nDocumentation of the MLHyperParam\\\\n\\\\n\\\\n\\\\nproperty name None | str\\\\nThe value of the MLHyperParam\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLMetricClass(name, description=None, value=None, createdAt=None)\\\\nBases\\\\n\\\\nname (str)\\\\ndescription (Optional[str])\\\\nvalue (Optional[str])\\\\ncreatedAt (Optional[int])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty createdAt None | str\\\\nDocumentation of the mlMetric\\\\n\\\\n\\\\n\\\\nproperty name None | str\\\\nThe value of the mlMetric\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLModelDeploymentKeyClass(platform, name, origin)\\\\nBases\\\\n\\\\nplatform (str)\\\\nname (str)\\\\norigin (Union[str, FabricTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name str | FabricTypeClass\\\\nFabric type where model Deployment belongs to or where it was generated\\\\n\\\\n\\\\n\\\\nproperty platform Aspect\\\\nProperties associated with an ML Model Deployment\\\\n\\\\nParameters None | int\\\\nDate when the MLModelDeployment was developed\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDocumentation of the MLModelDeployment\\\\n\\\\n\\\\n\\\\nproperty externalUrl None | str | DeploymentStatusClass\\\\nStatus of the deployment\\\\n\\\\n\\\\n\\\\nproperty version DictWrapper\\\\n\\\\nParameters List[MLModelDeploymentKeyClass | MLModelDeploymentPropertiesClass | OwnershipClass | StatusClass | DeprecationClass | GlobalTagsClass | DataPlatformInstanceClass]\\\\nThe list of metadata aspects associated with the MLModelDeployment. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nPrompts which affect the performance of the MLModel\\\\n\\\\nParameters None | List[MLModelFactorsClass]\\\\nWhich factors are being reported, and why were these chosen?\\\\n\\\\n\\\\n\\\\nproperty relevantFactors DictWrapper\\\\nFactors affecting the performance of the MLModel.\\\\n\\\\nParameters None | List[str]\\\\nA further factor affecting MLModel performance is the environment in which it is deployed.\\\\n\\\\n\\\\n\\\\nproperty groups None | List[str]\\\\nThe performance of a MLModel can vary depending on what instruments were used to capture the input to the MLModel.\\\\nFor example, a face detection model may perform differently depending on the camera\\\\u2019s hardware and software,\\\\nincluding lens, image stabilization, high dynamic range techniques, and background blurring for portrait mode.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLModelGroupKeyClass(platform, name, origin)\\\\nBases\\\\n\\\\nplatform (str)\\\\nname (str)\\\\norigin (Union[str, FabricTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name str | FabricTypeClass\\\\nFabric type where model group belongs to or where it was generated\\\\n\\\\n\\\\n\\\\nproperty platform Aspect\\\\nProperties associated with an ML Model Group\\\\n\\\\nParameters None | int\\\\nDate when the MLModelGroup was developed\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDocumentation of the MLModelGroup\\\\n\\\\n\\\\n\\\\nproperty version DictWrapper\\\\n\\\\nParameters List[MLModelGroupKeyClass | MLModelGroupPropertiesClass | OwnershipClass | StatusClass | DeprecationClass | BrowsePathsClass | GlobalTagsClass | DataPlatformInstanceClass | BrowsePathsV2Class]\\\\nThe list of metadata aspects associated with the MLModelGroup. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nKey for an ML model\\\\n\\\\nParameters str\\\\nName of the MLModel\\\\n\\\\n\\\\n\\\\nproperty origin str\\\\nStandardized platform urn for the model\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MLModelPropertiesClass(customProperties=None, externalUrl=None, description=None, date=None, version=None, type=None, hyperParameters=None, hyperParams=None, trainingMetrics=None, onlineMetrics=None, mlFeatures=None, tags=None, deployments=None, trainingJobs=None, downstreamJobs=None, groups=None)\\\\nBases\\\\n\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\ndescription (Optional[str])\\\\ndate (Optional[int])\\\\nversion (Optional[VersionTagClass])\\\\ntype (Optional[str])\\\\nhyperParameters (Optional[Dict[str, Union[str, int, float, bool]]])\\\\nhyperParams (Optional[List[MLHyperParamClass]])\\\\ntrainingMetrics (Optional[List[MLMetricClass]])\\\\nonlineMetrics (Optional[List[MLMetricClass]])\\\\nmlFeatures (Optional[List[str]])\\\\ntags (Optional[List[str]])\\\\ndeployments (Optional[List[str]])\\\\ntrainingJobs (Optional[List[str]])\\\\ndownstreamJobs (Optional[List[str]])\\\\ngroups (Optional[List[str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty customProperties None | int\\\\nDate when the MLModel was developed\\\\n\\\\n\\\\n\\\\nproperty deployments None | str\\\\nDocumentation of the MLModel\\\\n\\\\n\\\\n\\\\nproperty downstreamJobs None | str\\\\nURL where the reference exist\\\\n\\\\n\\\\n\\\\nproperty groups None | Dict[str, str | int | float | bool]\\\\nHyper Parameters of the MLModel\\\\nNOTE None | List[MLHyperParamClass]\\\\nHyperparameters of the MLModel\\\\n\\\\n\\\\n\\\\nproperty mlFeatures None | List[MLMetricClass]\\\\nMetrics of the MLModel used in production\\\\n\\\\n\\\\n\\\\nproperty tags None | List[str]\\\\nList of jobs (if any) used to train the model\\\\n\\\\n\\\\n\\\\nproperty trainingMetrics None | str\\\\nType of Algorithm or MLModel such as whether it is a Naive Bayes classifier, Convolutional Neural Network, etc\\\\n\\\\n\\\\n\\\\nproperty version DictWrapper\\\\nMLModel Snapshot entity details.\\\\n\\\\nParameters List[MLModelKeyClass | OwnershipClass | MLModelPropertiesClass | IntendedUseClass | MLModelFactorPromptsClass | MetricsClass | EvaluationDataClass | TrainingDataClass | QuantitativeAnalysesClass | EthicalConsiderationsClass | CaveatsAndRecommendationsClass | InstitutionalMemoryClass | SourceCodeClass | StatusClass | CostClass | DeprecationClass | BrowsePathsClass | GlobalTagsClass | DataPlatformInstanceClass | BrowsePathsV2Class]\\\\nThe list of metadata aspects associated with the MLModel. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nKey for an MLPrimaryKey\\\\n\\\\nParameters str\\\\nNamespace for the primary key\\\\n\\\\n\\\\n\\\\nproperty name Aspect\\\\nProperties associated with a MLPrimaryKey\\\\n\\\\nParameters None | str | MLFeatureDataTypeClass\\\\nData Type of the MLPrimaryKey\\\\n\\\\n\\\\n\\\\nproperty description List[str]\\\\nSource of the MLPrimaryKey\\\\n\\\\n\\\\n\\\\nproperty version DictWrapper\\\\n\\\\nParameters List[MLPrimaryKeyKeyClass | MLPrimaryKeyPropertiesClass | OwnershipClass | InstitutionalMemoryClass | StatusClass | DeprecationClass | GlobalTagsClass | DataPlatformInstanceClass]\\\\nThe list of metadata aspects associated with the MLPrimaryKey. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn DictWrapper\\\\nMap field type.\\\\n\\\\nParameters None | str\\\\nKey type in a map\\\\n\\\\n\\\\n\\\\nproperty valueType DictWrapper\\\\nCarries information about which roles a user is assigned to.\\\\n\\\\nParameters str\\\\nWhere the media content is stored.\\\\n\\\\n\\\\n\\\\nproperty type object\\\\nEnum defining the type of content a Media object holds.\\\\n\\\\n\\\\nIMAGE = \'IMAGE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MetadataChangeEventClass(proposedSnapshot, auditHeader=None, proposedDelta=None, systemMetadata=None)\\\\nBases\\\\n\\\\nproposedSnapshot (Union[ChartSnapshotClass, CorpGroupSnapshotClass, CorpUserSnapshotClass, DashboardSnapshotClass, DataFlowSnapshotClass, DataJobSnapshotClass, DatasetSnapshotClass, DataProcessSnapshotClass, DataPlatformSnapshotClass, MLModelSnapshotClass, MLPrimaryKeySnapshotClass, MLFeatureSnapshotClass, MLFeatureTableSnapshotClass, MLModelDeploymentSnapshotClass, MLModelGroupSnapshotClass, TagSnapshotClass, GlossaryTermSnapshotClass, GlossaryNodeSnapshotClass, DataHubPolicySnapshotClass, SchemaFieldSnapshotClass, DataHubRetentionSnapshotClass])\\\\nauditHeader (Optional[KafkaAuditHeaderClass])\\\\nproposedDelta (None)\\\\nsystemMetadata (Optional[SystemMetadataClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty auditHeader None\\\\nDelta of the proposed metadata partial update.\\\\n\\\\n\\\\n\\\\nproperty proposedSnapshot None | SystemMetadataClass\\\\nMetadata around how the snapshot was ingested\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MetadataChangeLogClass(entityType, changeType, auditHeader=None, entityUrn=None, entityKeyAspect=None, aspectName=None, aspect=None, systemMetadata=None, previousAspectValue=None, previousSystemMetadata=None, created=None)\\\\nBases\\\\n\\\\nentityType (str)\\\\nchangeType (Union[str, ChangeTypeClass])\\\\nauditHeader (Optional[KafkaAuditHeaderClass])\\\\nentityUrn (Optional[str])\\\\nentityKeyAspect (Optional[GenericAspectClass])\\\\naspectName (Optional[str])\\\\naspect (Optional[GenericAspectClass])\\\\nsystemMetadata (Optional[SystemMetadataClass])\\\\npreviousAspectValue (Optional[GenericAspectClass])\\\\npreviousSystemMetadata (Optional[SystemMetadataClass])\\\\ncreated (Optional[AuditStampClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspect None | str\\\\nAspect of the entity being written to\\\\nNot filling this out implies that the writer wants to affect the entire entity\\\\nNote None | KafkaAuditHeaderClass\\\\nKafka audit header. Currently remains unused in the open source.\\\\n\\\\n\\\\n\\\\nproperty changeType None | AuditStampClass\\\\nAn audit stamp detailing who and when the aspect was changed by. Required for all intents and purposes.\\\\n\\\\n\\\\n\\\\nproperty entityKeyAspect str\\\\nType of the entity being written to\\\\n\\\\n\\\\n\\\\nproperty entityUrn None | GenericAspectClass\\\\nThe previous value of the aspect that has changed.\\\\n\\\\n\\\\n\\\\nproperty previousSystemMetadata None | SystemMetadataClass\\\\nA string-&amp;gt;string map of custom properties that one might want to attach to an event\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.MetadataChangeProposalClass(entityType, changeType, auditHeader=None, entityUrn=None, entityKeyAspect=None, aspectName=None, aspect=None, systemMetadata=None)\\\\nBases\\\\n\\\\nentityType (str)\\\\nchangeType (Union[str, ChangeTypeClass])\\\\nauditHeader (Optional[KafkaAuditHeaderClass])\\\\nentityUrn (Optional[str])\\\\nentityKeyAspect (Optional[GenericAspectClass])\\\\naspectName (Optional[str])\\\\naspect (Optional[GenericAspectClass])\\\\nsystemMetadata (Optional[SystemMetadataClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty aspect None | str\\\\nAspect of the entity being written to\\\\nNot filling this out implies that the writer wants to affect the entire entity\\\\nNote None | KafkaAuditHeaderClass\\\\nKafka audit header. Currently remains unused in the open source.\\\\n\\\\n\\\\n\\\\nproperty changeType None | GenericAspectClass\\\\nKey aspect of the entity being written\\\\n\\\\n\\\\n\\\\nproperty entityType None | str\\\\nUrn of the entity being written\\\\n\\\\n\\\\n\\\\nproperty systemMetadata Aspect\\\\nMetrics to be featured for the MLModel.\\\\n\\\\nParameters None | List[str]\\\\nDecision Thresholds used (if any)?\\\\n\\\\n\\\\n\\\\nproperty performanceMeasures DictWrapper\\\\nSchema holder for MySql data definition language that describes an MySql table.\\\\n\\\\nParameters str\\\\nThe native schema in the dataset\\\\u2019s platform. This is a human readable (json blob) table schema.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.NativeGroupMembershipClass(nativeGroups)\\\\nBases\\\\nnativeGroups (List[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty nativeGroups DictWrapper\\\\nA record of all supported cells for a Notebook. Only one type of cell will be non-null.\\\\n\\\\nParameters None | ChartCellClass\\\\nThe chart cell content. The will be non-null only when all other cell field is null.\\\\n\\\\n\\\\n\\\\nproperty queryCell None | TextCellClass\\\\nThe text cell content. The will be non-null only when all other cell field is null.\\\\n\\\\n\\\\n\\\\nproperty type object\\\\nType of Notebook Cell\\\\n\\\\n\\\\nCHARTCELL = \'CHARTCELL\'\\\\n\\\\n\\\\n\\\\nQUERYCELL = \'QUERYCELL\'\\\\n\\\\n\\\\n\\\\nTEXTCELL = \'TEXTCELL\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.NotebookContentClass(cells=None)\\\\nBases This is IN BETA version\\\\n\\\\nParameters List[NotebookCellClass]\\\\nThe content of a Notebook which is composed by a list of NotebookCell\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.NotebookInfoClass(title, changeAuditStamps, customProperties=None, externalUrl=None, description=None)\\\\nBases This is IN BETA version\\\\n\\\\nParameters ChangeAuditStampsClass\\\\nCaptures information about who created/last modified/deleted this Notebook and when\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nDetailed description about the Notebook\\\\n\\\\n\\\\n\\\\nproperty externalUrl str\\\\nTitle of the Notebook\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.NotebookKeyClass(notebookTool, notebookId)\\\\nBases\\\\n\\\\nnotebookTool (str)\\\\nnotebookId (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty notebookId str\\\\nThe name of the Notebook tool such as QueryBook, etc.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.NullTypeClass\\\\nBases DictWrapper\\\\nNumber data type Aspect\\\\nOperational info for an entity.\\\\n\\\\nParameters ClassVar[str] = \'timeseries\'\\\\n\\\\n\\\\n\\\\nproperty actor None | List[str]\\\\nWhich other datasets were affected by this operation.\\\\n\\\\n\\\\n\\\\nproperty customOperationType None | Dict[str, str]\\\\nCustom properties\\\\n\\\\n\\\\n\\\\nproperty eventGranularity int\\\\nThe time at which the operation occurred. Would be better named \\\\u2018operationTime\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty messageId None | int\\\\nHow many rows were affected by this operation.\\\\n\\\\n\\\\n\\\\nproperty operationType PartitionSpecClass | None\\\\nThe optional partition specification.\\\\n\\\\n\\\\n\\\\nproperty sourceType int\\\\nThe event timestamp field as epoch at UTC in milli seconds.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OperationSourceTypeClass\\\\nBases object\\\\nEnum to define the operation type when an entity changes.\\\\n\\\\n\\\\nALTER = \'ALTER\'\\\\n\\\\n\\\\n\\\\nCREATE = \'CREATE\'\\\\n\\\\n\\\\n\\\\nCUSTOM = \'CUSTOM\'\\\\n\\\\n\\\\n\\\\nDELETE = \'DELETE\'\\\\n\\\\n\\\\n\\\\nDROP = \'DROP\'\\\\n\\\\n\\\\n\\\\nINSERT = \'INSERT\'\\\\n\\\\n\\\\n\\\\nUNKNOWN = \'UNKNOWN\'\\\\n\\\\n\\\\n\\\\nUPDATE = \'UPDATE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OracleDDLClass(tableSchema)\\\\nBases\\\\ntableSchema (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty tableSchema DictWrapper\\\\nSchema text of an ORC schema.\\\\n\\\\nParameters str\\\\nThe native schema for ORC file format.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OriginClass(type, externalType=None)\\\\nBases\\\\n\\\\ntype (Union[str, OriginTypeClass])\\\\nexternalType (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty externalType str | OriginTypeClass\\\\nWhere an entity originated from. Either NATIVE or EXTERNAL.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OriginTypeClass\\\\nBases DictWrapper\\\\nSchema holder for undefined schema types.\\\\n\\\\nParameters str\\\\nThe native schema in the dataset\\\\u2019s platform.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OwnerClass(owner, type, typeUrn=None, source=None)\\\\nBases\\\\n\\\\nowner (str)\\\\ntype (Union[str, OwnershipTypeClass])\\\\ntypeUrn (Optional[str])\\\\nsource (Optional[OwnershipSourceClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty ownerldap, urncorpGrouplimpname\\\\n(Caveat\\\\nOwner URN, e.g. urn\\\\n\\\\nType None | OwnershipSourceClass\\\\nSource information for the ownership\\\\n\\\\n\\\\n\\\\nproperty type None | str\\\\nThe type of the ownership\\\\nUrn of type O\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OwnershipClass(owners, lastModified=None)\\\\nBases\\\\n\\\\nowners (List[OwnerClass])\\\\nlastModified (Optional[AuditStampClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty lastModified List[OwnerClass]\\\\nList of owners of the entity.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OwnershipSourceClass(type, url=None)\\\\nBases\\\\n\\\\ntype (Union[str, OwnershipSourceTypeClass])\\\\nurl (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty type None | str\\\\nA reference URL for the source\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OwnershipSourceTypeClass\\\\nBases object\\\\nAsset owner types\\\\n\\\\n\\\\nBUSINESSOWNER = \'BUSINESSOWNER\'\\\\n\\\\n\\\\n\\\\nCONSUMER = \'CONSUMER\'\\\\n\\\\n\\\\n\\\\nCUSTOM = \'CUSTOM\'\\\\n\\\\n\\\\n\\\\nDATAOWNER = \'DATAOWNER\'\\\\n\\\\n\\\\n\\\\nDATASTEWARD = \'DATASTEWARD\'\\\\n\\\\n\\\\n\\\\nDELEGATE = \'DELEGATE\'\\\\n\\\\n\\\\n\\\\nDEVELOPER = \'DEVELOPER\'\\\\n\\\\n\\\\n\\\\nNONE = \'NONE\'\\\\n\\\\n\\\\n\\\\nPRODUCER = \'PRODUCER\'\\\\n\\\\n\\\\n\\\\nSTAKEHOLDER = \'STAKEHOLDER\'\\\\n\\\\n\\\\n\\\\nTECHNICALOWNER = \'TECHNICALOWNER\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OwnershipTypeInfoClass(name, created, lastModified, description=None)\\\\nBases\\\\n\\\\nname (str)\\\\ncreated (AuditStampClass)\\\\nlastModified (AuditStampClass)\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty created None | str\\\\nDescription of the Ownership Type\\\\n\\\\n\\\\n\\\\nproperty lastModified str\\\\nDisplay name of the Ownership Type\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.OwnershipTypeKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id DictWrapper\\\\nArbitrary key-value parameters for an Entity Change Event. (any record).\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PartitionSpecClass(partition, type=None, timePartition=None)\\\\nBases\\\\n\\\\npartition (str)\\\\ntype (Union[str, PartitionTypeClass, None])\\\\ntimePartition (Optional[TimeWindowClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty partition None | TimeWindowClass\\\\nTime window of the partition if applicable\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nFULLTABLE = \'FULLTABLE\'\\\\n\\\\n\\\\n\\\\nPARTITION = \'PARTITION\'\\\\n\\\\n\\\\n\\\\nQUERY = \'QUERY\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PlatformEventClass(header, name, payload)\\\\nBases\\\\n\\\\nheader (PlatformEventHeaderClass)\\\\nname (str)\\\\npayload (GenericPayloadClass)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty header str\\\\nThe name of the event, e.g. the type of event. For example, \\\\u2018notificationRequestEvent\\\\u2019, \\\\u2018entityChangeEvent\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty payload DictWrapper\\\\nA header included with each DataHub platform event.\\\\n\\\\nParameters int\\\\nThe event timestamp field as epoch at UTC in milli seconds.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PlatformTypeClass\\\\nBases object\\\\nThe matching condition in a filter criterion\\\\n\\\\n\\\\nEQUALS = \'EQUALS\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PolicyMatchCriterionClass(field, values, condition=None)\\\\nBases\\\\n\\\\nfield (str)\\\\nvalues (List[str])\\\\ncondition (Union[str, PolicyMatchConditionClass, None])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty condition str\\\\nThe name of the field that the criterion refers to\\\\n\\\\n\\\\n\\\\nproperty values DictWrapper\\\\nThe filter for specifying the resource or actor to apply privileges to\\\\n\\\\nParameters List[PolicyMatchCriterionClass]\\\\nA list of criteria to apply conjunctively (so all criteria must pass)\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PostContentClass(title, type, description=None, link=None, media=None)\\\\nBases\\\\n\\\\ntitle (str)\\\\ntype (Union[str, PostContentTypeClass])\\\\ndescription (Optional[str])\\\\nlink (Optional[str])\\\\nmedia (Optional[MediaClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty description None | str\\\\nOptional link that the post is associated with.\\\\n\\\\n\\\\n\\\\nproperty media str\\\\nTitle of the post.\\\\n\\\\n\\\\n\\\\nproperty type object\\\\nEnum defining the type of content held in a Post.\\\\n\\\\n\\\\nLINK = \'LINK\'\\\\n\\\\n\\\\n\\\\nTEXT = \'TEXT\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PostInfoClass(type, content, created, lastModified)\\\\nBases\\\\n\\\\ntype (Union[str, PostTypeClass])\\\\ncontent (PostContentClass)\\\\ncreated (int)\\\\nlastModified (int)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty content int\\\\nThe time at which the post was initially created\\\\n\\\\n\\\\n\\\\nproperty lastModified str | PostTypeClass\\\\nType of the Post.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PostKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id object\\\\nEnum defining types of Posts.\\\\n\\\\n\\\\nHOMEPAGEANNOUNCEMENT = \'HOMEPAGEANNOUNCEMENT\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.PrestoDDLClass(rawSchema)\\\\nBases\\\\nrawSchema (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty rawSchema DictWrapper\\\\n\\\\nParameters str\\\\n\\\\n\\\\n\\\\nproperty value Aspect\\\\nQuantitative analyses should be disaggregated, that is, broken down by the chosen factors. Quantitative analyses should provide the results of evaluating the MLModel according to the chosen metrics, providing confidence interval values when possible.\\\\n\\\\nParameters None | str\\\\nLink to a dashboard with results showing how the MLModel performed with respect to the intersection of evaluated factors?\\\\n\\\\n\\\\n\\\\nproperty unitaryResults DictWrapper\\\\nQuery cell in a Notebook, which will present content in query format\\\\n\\\\nParameters str\\\\nUnique id for the cell. This id should be globally unique for a Notebook tool even when there are multiple deployments of it. As an example, Notebook URL could be used here for QueryBook such as \\\\u2018querybook.com/notebook/773/?cellId=1234\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty cellTitle ChangeAuditStampsClass\\\\nCaptures information about who created/last modified/deleted this Notebook cell and when\\\\n\\\\n\\\\n\\\\nproperty lastExecuted str\\\\nRaw query to explain some specific logic in a Notebook\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.QueryKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id object\\\\n\\\\n\\\\nSQL = \'SQL\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.QueryPropertiesClass(statement, source, created, lastModified, name=None, description=None)\\\\nBases\\\\n\\\\nstatement (QueryStatementClass)\\\\nsource (Union[str, QuerySourceClass])\\\\ncreated (AuditStampClass)\\\\nlastModified (AuditStampClass)\\\\nname (Optional[str])\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty created None | str\\\\nThe Query description.\\\\n\\\\n\\\\n\\\\nproperty lastModified None | str\\\\nOptional display name to identify the query.\\\\n\\\\n\\\\n\\\\nproperty source QueryStatementClass\\\\nThe Query Statement.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.QuerySourceClass\\\\nBases DictWrapper\\\\nA query statement against one or more data assets.\\\\n\\\\nParameters str | QueryLanguageClass\\\\nThe language of the Query, e.g. SQL.\\\\n\\\\n\\\\n\\\\nproperty value DictWrapper\\\\nA single subject of a particular query.\\\\nIn the future, we may evolve this model to include richer details\\\\nabout the Query Subject in relation to the query.\\\\n\\\\nParameters str\\\\nAn entity which is the subject of a query.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.QuerySubjectsClass(subjects)\\\\nBases\\\\nsubjects (List[QuerySubjectClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty subjects DictWrapper\\\\nRecord field type.\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.RetentionClass(version=None, time=None)\\\\nBases\\\\n\\\\nversion (Optional[VersionBasedRetentionClass])\\\\ntime (Optional[TimeBasedRetentionClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty time None | VersionBasedRetentionClass\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.RoleAssociationClass(urn)\\\\nBases\\\\nurn (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nKey for a External AccessManagement\\\\n\\\\nParameters str\\\\nA unique id for the access management IAM.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.RoleMembershipClass(roles)\\\\nBases\\\\nroles (List[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty roles Aspect\\\\nInformation about a ExternalRoleProperties\\\\n\\\\nParameters None | AuditStampClass\\\\nCreated Audit stamp\\\\n\\\\n\\\\n\\\\nproperty description str\\\\nDisplay name of the IAM Role in the external system\\\\n\\\\n\\\\n\\\\nproperty requestUrl str\\\\nCan be READ, ADMIN, WRITE\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.RoleUserClass(user)\\\\nBases\\\\nuser (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty user DictWrapper\\\\nAttributes defining a ROWCOUNTCHANGE volume assertion.\\\\n\\\\nParameters str | AssertionStdOperatorClass\\\\nThe operator you\\\\u2019d like to apply.\\\\nNote that only numeric operators are valid inputs AssertionStdParametersClass\\\\nThe parameters you\\\\u2019d like to provide as input to the operator.\\\\nNote that only numeric parameter types are valid inputs str | AssertionValueChangeTypeClass\\\\na fixed absolute value or a relative percentage.\\\\n\\\\nType DictWrapper\\\\nAttributes defining a ROWCOUNTTOTAL volume assertion.\\\\n\\\\nParameters str | AssertionStdOperatorClass\\\\nThe operator you\\\\u2019d like to apply.\\\\nNote that only numeric operators are valid inputs AssertionStdParametersClass\\\\nThe parameters you\\\\u2019d like to provide as input to the operator.\\\\nNote that only numeric parameter types are valid inputs object\\\\n\\\\n\\\\nFAILURE = \'FAILURE\'\\\\n\\\\n\\\\n\\\\nSKIPPED = \'SKIPPED\'\\\\n\\\\n\\\\n\\\\nSUCCESS = \'SUCCESS\'\\\\n\\\\n\\\\n\\\\nUPFORRETRY = \'UPFORRETRY\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SchemaAssertionInfoClass(entity, schema)\\\\nBases\\\\n\\\\nentity (str)\\\\nschema (SchemaMetadataClass)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entity SchemaMetadataClass\\\\nA definition of the expected structure for the asset\\\\nNote that many of the fields of this model, especially those related to metadata (tags, terms)\\\\nwill go unused in this context.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SchemaContractClass(assertion)\\\\nBases\\\\nassertion (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty assertion DictWrapper\\\\nSchemaField to describe metadata related to dataset schema.\\\\n\\\\nParameters None | AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this schema field.\\\\n\\\\n\\\\n\\\\nproperty description str\\\\nFlattened name of the field. Field is computed from jsonPath field.\\\\n\\\\n\\\\n\\\\nproperty globalTags None | GlossaryTermsClass\\\\nGlossary terms associated with the field\\\\n\\\\n\\\\n\\\\nproperty isPartOfKey None | bool\\\\nFor Datasets which are partitioned, this determines the partitioning key.\\\\n\\\\n\\\\n\\\\nproperty jsonPath None | str\\\\nFor schema fields that have other properties that are not modeled explicitly,\\\\nuse this field to serialize those properties into a JSON string\\\\n\\\\n\\\\n\\\\nproperty label None | AuditStampClass\\\\nAn AuditStamp corresponding to the last modification of this schema field.\\\\n\\\\n\\\\n\\\\nproperty nativeDataType bool\\\\nIndicates if this field is optional or nullable\\\\n\\\\n\\\\n\\\\nproperty recursive SchemaFieldDataTypeClass\\\\nPlatform independent field type of the field.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SchemaFieldDataTypeClass(type)\\\\nBases\\\\ntype (Union[BooleanTypeClass, FixedTypeClass, StringTypeClass, BytesTypeClass, NumberTypeClass, DateTypeClass, TimeTypeClass, EnumTypeClass, NullTypeClass, MapTypeClass, ArrayTypeClass, UnionTypeClass, RecordTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty type Aspect\\\\nKey for a SchemaField\\\\n\\\\nParameters str\\\\nfieldPath identifying the schema field\\\\n\\\\n\\\\n\\\\nproperty parent DictWrapper\\\\nA metadata snapshot for a specific schema field entity.\\\\n\\\\nParameters List[SchemaFieldKeyClass]\\\\nThe list of metadata aspects associated with the dataset. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn DictWrapper\\\\nLightweight spec used for referencing a particular schema field.\\\\n\\\\nParameters str\\\\nThe native field type\\\\n\\\\n\\\\n\\\\nproperty path str\\\\nThe DataHub standard schema field type.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SchemaMetadataClass(schemaName, platform, version, hash, platformSchema, fields, created=None, lastModified=None, deleted=None, dataset=None, cluster=None, primaryKeys=None, foreignKeysSpecs=None, foreignKeys=None)\\\\nBases\\\\n\\\\nschemaName (str)\\\\nplatform (str)\\\\nversion (int)\\\\nhash (str)\\\\nplatformSchema (Union[EspressoSchemaClass, OracleDDLClass, MySqlDDLClass, PrestoDDLClass, KafkaSchemaClass, BinaryJsonSchemaClass, OrcSchemaClass, SchemalessClass, KeyValueSchemaClass, OtherSchemaClass])\\\\nfields (List[SchemaFieldClass])\\\\ncreated (Optional[AuditStampClass])\\\\nlastModified (Optional[AuditStampClass])\\\\ndeleted (Optional[AuditStampClass])\\\\ndataset (Optional[str])\\\\ncluster (Optional[str])\\\\nprimaryKeys (Optional[List[str]])\\\\nforeignKeysSpecs (Optional[Dict[str, ForeignKeySpecClass]])\\\\nforeignKeys (Optional[List[ForeignKeyConstraintClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty cluster AuditStampClass\\\\nAn AuditStamp corresponding to the creation of this resource/association/sub-resource. A value of 0 for time indicates missing data.\\\\n\\\\n\\\\n\\\\nproperty dataset None | AuditStampClass\\\\nAn AuditStamp corresponding to the deletion of this resource/association/sub-resource. Logically, deleted MUST have a later timestamp than creation. It may or may not have the same time as lastModified depending upon the resource/association/sub-resource semantics.\\\\n\\\\n\\\\n\\\\nproperty fields None | List[ForeignKeyConstraintClass]\\\\nList of foreign key constraints for the schema\\\\n\\\\n\\\\n\\\\nproperty foreignKeysSpecs str\\\\nthe SHA1 hash of the schema content\\\\n\\\\n\\\\n\\\\nproperty lastModified str\\\\nplatform\\\\nStandardized platform urn where schema is defined. The data platform Urn (urn\\\\n\\\\nType EspressoSchemaClass | OracleDDLClass | MySqlDDLClass | PrestoDDLClass | KafkaSchemaClass | BinaryJsonSchemaClass | OrcSchemaClass | SchemalessClass | KeyValueSchemaClass | OtherSchemaClass\\\\nThe native schema in the dataset\\\\u2019s platform.\\\\n\\\\n\\\\n\\\\nproperty primaryKeys str\\\\nSchema name e.g. PageViewEvent, identity.Profile, ams.accountmanagementtracking\\\\n\\\\n\\\\n\\\\nproperty version DictWrapper\\\\nThe dataset has no specific schema associated with it\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SiblingsClass(siblings, primary)\\\\nBases\\\\n\\\\nsiblings (List[str])\\\\nprimary (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty primary List[str]\\\\nList of sibling entities\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SourceCodeClass(sourceCode)\\\\nBases\\\\nsourceCode (List[SourceCodeUrlClass])\\\\n\\\\n\\\\n\\\\n\\\\nproperty sourceCode DictWrapper\\\\nSource Code Url Entity\\\\n\\\\nParameters str\\\\nSource Code Url\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nEVALUATIONPIPELINESOURCECODE = \'EVALUATIONPIPELINESOURCECODE\'\\\\n\\\\n\\\\n\\\\nMLMODELSOURCECODE = \'MLMODELSOURCECODE\'\\\\n\\\\n\\\\n\\\\nTRAININGPIPELINESOURCECODE = \'TRAININGPIPELINESOURCECODE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SqlAssertionInfoClass(type, entity, statement, operator, parameters, changeType=None)\\\\nBases\\\\n\\\\ntype (Union[str, SqlAssertionTypeClass])\\\\nentity (str)\\\\nstatement (str)\\\\noperator (Union[str, AssertionStdOperatorClass])\\\\nparameters (AssertionStdParametersClass)\\\\nchangeType (Union[None, str, AssertionValueChangeTypeClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty changeType\\\\nThe type of the value used to evaluate the assertion\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entity str | AssertionStdOperatorClass\\\\nThe operator you\\\\u2019d like to apply to the result of the SQL query.\\\\nNote that at this time, only numeric operators are valid inputs AssertionStdParametersClass\\\\nThe parameters you\\\\u2019d like to provide as input to the operator.\\\\nNote that only numeric parameter types are valid inputs str\\\\nThe SQL statement to be executed when evaluating the assertion (or computing the metric).\\\\nThis should be a valid and complete statement, executable by itself.\\\\nUsually this should be a SELECT query statement.\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nMETRIC = \'METRIC\'\\\\n\\\\n\\\\n\\\\nMETRICCHANGE = \'METRICCHANGE\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.StatusClass(removed=None)\\\\nBases\\\\nremoved (Optional[bool])\\\\n\\\\n\\\\n\\\\n\\\\nproperty removed DictWrapper\\\\nString field type.\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.StructuredExecutionReportClass(type, serializedValue, contentType)\\\\nBases\\\\n\\\\ntype (str)\\\\nserializedValue (str)\\\\ncontentType (str)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty contentType str\\\\nThe serialized value of the structured report\\\\n\\\\n\\\\n\\\\nproperty type Aspect\\\\nSub Types. Use this aspect to specialize a generic Entity\\\\ne.g. Making a Dataset also be a View or also be a LookerExplore\\\\n\\\\nParameters List[str]\\\\nThe names of the specific types.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.SystemMetadataClass(lastObserved=None, runId=None, lastRunId=None, pipelineName=None, registryName=None, registryVersion=None, properties=None)\\\\nBases\\\\n\\\\nlastObserved (Optional[int])\\\\nrunId (Optional[str])\\\\nlastRunId (Optional[str])\\\\npipelineName (Optional[str])\\\\nregistryName (Optional[str])\\\\nregistryVersion (Optional[str])\\\\nproperties (Optional[Dict[str, str]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty lastObserved str | None\\\\nThe last run id that produced the metadata. Populated in case of batch-ingestion.\\\\n\\\\n\\\\n\\\\nproperty pipelineName None | Dict[str, str]\\\\nAdditional properties\\\\n\\\\n\\\\n\\\\nproperty registryName None | str\\\\nThe model registry version that was used to process this event\\\\n\\\\n\\\\n\\\\nproperty runId DictWrapper\\\\nProperties of an applied tag. For now, just an Urn. In the future we can extend this with other properties, e.g.\\\\npropagation parameters.\\\\n\\\\nParameters None | str\\\\nAdditional context about the association\\\\n\\\\n\\\\n\\\\nproperty tag Aspect\\\\nKey for a Tag\\\\n\\\\nParameters str\\\\nThe tag name, which serves as a unique id\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TagPropertiesClass(name, description=None, colorHex=None)\\\\nBases\\\\n\\\\nname (str)\\\\ndescription (Optional[str])\\\\ncolorHex (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty colorHex None | str\\\\nDocumentation of the tag\\\\n\\\\n\\\\n\\\\nproperty name DictWrapper\\\\nA metadata snapshot for a specific dataset entity.\\\\n\\\\nParameters List[TagKeyClass | OwnershipClass | TagPropertiesClass | StatusClass]\\\\nThe list of metadata aspects associated with the dataset. Depending on the use case, this can either be all, or a selection, of supported aspects.\\\\n\\\\n\\\\n\\\\nproperty urn Aspect\\\\nA simple wrapper around a String to persist the client ID for telemetry in DataHub\\\\u2019s backend DB\\\\n\\\\nParameters str\\\\nA string representing the telemetry client ID\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TelemetryKeyClass(name)\\\\nBases\\\\nname (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty name DictWrapper\\\\n\\\\nParameters None | str\\\\nJSON format configuration for the test\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nJSON = \'JSON\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TestInfoClass(name, category, definition, description=None)\\\\nBases\\\\n\\\\nname (str)\\\\ncategory (str)\\\\ndefinition (TestDefinitionClass)\\\\ndescription (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty category TestDefinitionClass\\\\nConfiguration for the Test\\\\n\\\\n\\\\n\\\\nproperty description str\\\\nThe name of the test\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TestKeyClass(id)\\\\nBases\\\\nid (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty id DictWrapper\\\\nInformation about a Test Result\\\\n\\\\nParameters str\\\\nThe urn of the test\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nFAILURE = \'FAILURE\'\\\\n\\\\n\\\\n\\\\nSUCCESS = \'SUCCESS\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TestResultsClass(failing, passing)\\\\nBases\\\\n\\\\nfailing (List[TestResultClass])\\\\npassing (List[TestResultClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty failing List[TestResultClass]\\\\nResults that are passing\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TextCellClass(cellId, changeAuditStamps, text, cellTitle=None)\\\\nBases\\\\n\\\\ncellId (str)\\\\nchangeAuditStamps (ChangeAuditStampsClass)\\\\ntext (str)\\\\ncellTitle (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty cellId None | str\\\\nTitle of the cell\\\\n\\\\n\\\\n\\\\nproperty changeAuditStamps str\\\\nThe actual text in a TextCell in a Notebook\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TimeBasedRetentionClass(maxAgeInSeconds)\\\\nBases\\\\nmaxAgeInSeconds (int)\\\\n\\\\n\\\\n\\\\n\\\\nproperty maxAgeInSeconds DictWrapper\\\\nA standard event timestamp\\\\n\\\\nParameters None | str\\\\nThe actor urn involved in the event.\\\\n\\\\nType int\\\\nWhen did the event occur\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TimeTypeClass\\\\nBases DictWrapper\\\\n\\\\nParameters TimeWindowSizeClass\\\\nThe length of the window.\\\\n\\\\n\\\\n\\\\nproperty startTimeMillis DictWrapper\\\\nDefines the size of a time window.\\\\n\\\\nParameters int\\\\nHow many units. Defaults to 1.\\\\n\\\\n\\\\n\\\\nproperty unit Aspect\\\\nIdeally, the MLModel card would contain as much information about the training data as the evaluation data. However, there might be cases where it is not feasible to provide this level of detailed information about the training data. For example, the data may be proprietary, or require a non-disclosure agreement. In these cases, we advocate for basic details about the distributions over groups in the data, as well as any other details that could inform stakeholders on the kinds of biases the model may have encoded.\\\\n\\\\nParameters List[BaseDataClass]\\\\nDetails on the dataset(s) used for training the MLModel\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.TransformationTypeClass\\\\nBases DictWrapper\\\\nField transformation expressed in UDF\\\\n\\\\nParameters str\\\\nA UDF mentioning how the source fields got transformed to destination field. This is the FQCN(Fully Qualified Class Name) of the udf.\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.UnionTypeClass(nestedTypes=None)\\\\nBases\\\\nnestedTypes (Optional[List[str]])\\\\n\\\\n\\\\n\\\\n\\\\nproperty nestedTypes DictWrapper\\\\nUpstream lineage information about a dataset including the source reporting the lineage\\\\n\\\\nParameters AuditStampClass\\\\nAudit stamp containing who reported the lineage and when.\\\\n\\\\n\\\\n\\\\nproperty created str\\\\nThe upstream dataset the lineage points to\\\\n\\\\n\\\\n\\\\nproperty properties str | DatasetLineageTypeClass\\\\nThe type of the lineage\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.UpstreamLineageClass(upstreams, fineGrainedLineages=None)\\\\nBases\\\\n\\\\nupstreams (List[UpstreamClass])\\\\nfineGrainedLineages (Optional[List[FineGrainedLineageClass]])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty fineGrainedLineages List[UpstreamClass]\\\\nList of upstream dataset lineage information\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.UrnForeignKeyClass(currentFieldPath)\\\\nBases\\\\ncurrentFieldPath (str)\\\\n\\\\n\\\\n\\\\n\\\\nproperty currentFieldPath DictWrapper\\\\nUsage data for a given resource, rolled up into a bucket.\\\\n\\\\nParameters int\\\\nBucket start time in milliseconds\\\\n\\\\n\\\\n\\\\nproperty duration UsageAggregationMetricsClass\\\\nMetrics associated with this bucket\\\\n\\\\n\\\\n\\\\nproperty resource DictWrapper\\\\nMetrics for usage data for a given resource and bucket. Not all fields\\\\nmake sense for all buckets, so every field is optional.\\\\n\\\\nParameters None | List[FieldUsageCountsClass]\\\\nField-level usage stats\\\\n\\\\n\\\\n\\\\nproperty topSqlQueries None | int\\\\nTotal SQL query count\\\\n\\\\n\\\\n\\\\nproperty uniqueUserCount None | List[UserUsageCountsClass]\\\\nUsers within this bucket, with frequency counts\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.UserUsageCountsClass(count, user=None, userEmail=None)\\\\nBases\\\\n\\\\ncount (int)\\\\nuser (Optional[str])\\\\nuserEmail (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty count None | str\\\\n\\\\n\\\\n\\\\nproperty userEmail DictWrapper\\\\n\\\\nParameters int\\\\n\\\\n\\\\n\\\\nproperty value DictWrapper\\\\nKeep max N latest records\\\\n\\\\nParameters int\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.VersionInfoClass(version, versionType, customProperties=None, externalUrl=None)\\\\nBases\\\\n\\\\nversion (str)\\\\nversionType (str)\\\\ncustomProperties (Optional[Dict[str, str]])\\\\nexternalUrl (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty customProperties None | str\\\\nURL where the reference exist\\\\n\\\\n\\\\n\\\\nproperty version str\\\\nThe type of the version like git hash or md5 hash\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.VersionTagClass(versionTag=None)\\\\nBases\\\\nversionTag (Optional[str])\\\\n\\\\n\\\\n\\\\n\\\\nproperty versionTag Aspect\\\\nDetails about a View.\\\\ne.g. Gets activated when subTypes is view\\\\n\\\\nParameters bool\\\\nWhether the view is materialized\\\\n\\\\n\\\\n\\\\nproperty viewLanguage str\\\\nThe view logic\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.VolumeAssertionInfoClass(type, entity, rowCountTotal=None, rowCountChange=None, incrementingSegmentRowCountTotal=None, incrementingSegmentRowCountChange=None, filter=None)\\\\nBases\\\\n\\\\ntype (Union[str, VolumeAssertionTypeClass])\\\\nentity (str)\\\\nrowCountTotal (Optional[RowCountTotalClass])\\\\nrowCountChange (Optional[RowCountChangeClass])\\\\nincrementingSegmentRowCountTotal (Optional[IncrementingSegmentRowCountTotalClass])\\\\nincrementingSegmentRowCountChange (Optional[IncrementingSegmentRowCountChangeClass])\\\\nfilter (Optional[DatasetFilterClass])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entity None | DatasetFilterClass\\\\nA definition of the specific filters that should be applied, when performing monitoring.\\\\nIf not provided, there is no filter, and the full table is under consideration.\\\\n\\\\n\\\\n\\\\nproperty incrementingSegmentRowCountChange None | IncrementingSegmentRowCountTotalClass\\\\nProduce FAILURE Assertion Result if the asset\\\\u2019s latest incrementing segment row count total\\\\ndoes not meet specific requirements. Required if type is \\\\u2018INCREMENTINGSEGMENTROWCOUNTTOTAL\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty rowCountChange None | RowCountTotalClass\\\\nProduce FAILURE Assertion Result if the row count of the asset does not meet specific requirements.\\\\nRequired if type is \\\\u2018ROWCOUNTTOTAL\\\\u2019\\\\n\\\\n\\\\n\\\\nproperty type object\\\\n\\\\n\\\\nINCREMENTINGSEGMENTROWCOUNTCHANGE = \'INCREMENTINGSEGMENTROWCOUNTCHANGE\'\\\\n\\\\n\\\\n\\\\nINCREMENTINGSEGMENTROWCOUNTTOTAL = \'INCREMENTINGSEGMENTROWCOUNTTOTAL\'\\\\n\\\\n\\\\n\\\\nROWCOUNTCHANGE = \'ROWCOUNTCHANGE\'\\\\n\\\\n\\\\n\\\\nROWCOUNTTOTAL = \'ROWCOUNTTOTAL\'\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.schemaclasses.WindowDurationClass\\\\nBases\\\\nfullname (str)\\\\n\\\\nReturn type:\\\\nRecordSchema\\\\n\\\\n\\\\n\\\\n\\\\n\\"}}&gt;","sidebar":"overviewSidebar"},"python-sdk/urns":{"id":"python-sdk/urns","title":"URNs","description":"\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.AssertionUrn(assertionid, *, allowcoercion=True)\\\\nBases\\\\n\\\\nassertionid (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nproperty assertionid\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (AssertionKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises\\\\nAssertionKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.ChartUrn(dashboardtool, chartid, *, allowcoercion=True)\\\\nBases\\\\n\\\\ndashboardtool (str)\\\\nchartid (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 2\\\\n\\\\n\\\\n\\\\nproperty chartid\\\\nurnstr (str)\\\\n\\\\nReturn type str\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nChartUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'container\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (ContainerKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'corpGroup\'\\\\n\\\\n\\\\n\\\\nURNPARTS Use the constructor instead\\\\n\\\\n\\\\nParameters\\\\nCorpGroupUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nCorpGroupUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name\\\\nCorpGroupKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.CorpUserUrn(username, *, allowcoercion=True)\\\\nBases\\\\n\\\\nusername (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod createfromid(id)\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nid (str)\\\\n\\\\nReturn type\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (CorpUserKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises\\\\nCorpUserKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty username SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'dashboard\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type str\\\\n\\\\n\\\\n\\\\nproperty dashboardtool List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (DashboardKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises\\\\nDashboardKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataContractUrn(id, *, allowcoercion=True)\\\\nBases\\\\n\\\\nid (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nDataContractUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty id\\\\nDataContractKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataFlowUrn(orchestrator, flowid, cluster, , _allow_coercion=True)\\\\nBases\\\\n\\\\norchestrator (str)\\\\nflow_id (str)\\\\ncluster (str)\\\\n_allow_coercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITY_TYPE ClassVar[int] = 3\\\\n\\\\n\\\\n\\\\nproperty cluster\\\\n\\\\norchestrator (str)\\\\nflow_id (str)\\\\nenv (str)\\\\nplatform_instance (Optional[str])\\\\n\\\\n\\\\nReturn type\\\\nurn_str (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entity_type str\\\\n\\\\n\\\\n\\\\nclassmethod from_key_aspect(key_aspect)\\\\n\\\\nParameters\\\\nDataFlowUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod from_string(urn_str)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(_UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nget_env()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nget_flow_id()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nget_orchestrator_name()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty orchestrator\\\\nDataFlowKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataHubRoleUrn(id, , allowcoercion=True)\\\\nBases\\\\n\\\\nid (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nDataHubRoleUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty id\\\\nDataHubRoleKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataHubViewUrn(id, , _allow_coercion=True)\\\\nBases\\\\n\\\\nid (str)\\\\n_allow_coercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITY_TYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod create_from_string(urn_str)\\\\n\\\\nParameters\\\\nTypeVar(_UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entity_ids str\\\\n\\\\n\\\\n\\\\nclassmethod from_key_aspect(key_aspect)\\\\n\\\\nParameters\\\\nDataHubViewUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod from_string(urn_str)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(_UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty id\\\\nDataHubViewKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataJobUrn(flow, job_id, , allowcoercion=True)\\\\nBases\\\\n\\\\nflow (str)\\\\njobid (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 2\\\\n\\\\n\\\\n\\\\nclassmethod createfromids(dataflowurn, jobid)\\\\n\\\\nParameters\\\\nDataJobUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nproperty flow\\\\nkeyaspect (DataJobKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises\\\\nDataFlowUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetjobid()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty jobid\\\\nDataJobKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataPlatformUrn(platformname, *, allowcoercion=True)\\\\nBases\\\\n\\\\nplatformname (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod createfromid(id)\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nid (str)\\\\n\\\\nReturn type\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (DataPlatformKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'dataProcessInstance\'\\\\n\\\\n\\\\n\\\\nURNPARTS Use the constructor instead\\\\n\\\\n\\\\nParameters\\\\nDataProcessInstanceUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nDataProcessInstanceUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetdataprocessinstanceid()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty id\\\\nDataProcessInstanceKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataProcessUrn(name, orchestrator, env=\'PROD\', *, allowcoercion=True)\\\\nBases\\\\n\\\\nname (str)\\\\norchestrator (str)\\\\nenv (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 3\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nproperty env\\\\nkeyaspect (DataProcessKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\nproperty orchestrator\\\\nDataProcessKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DataProductUrn(id, *, allowcoercion=True)\\\\nBases\\\\n\\\\nid (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nDataProductUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty id\\\\nDataProductKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DatasetUrn(platform, name, env=\'PROD\', *, allowcoercion=True)\\\\nBases\\\\n\\\\nplatform (str)\\\\nname (str)\\\\nenv (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 3\\\\n\\\\n\\\\n\\\\nclassmethod createfromids(platformid, tablename, env, platforminstance=None)\\\\n\\\\nParameters\\\\nDatasetUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nproperty env\\\\nkeyaspect (DatasetKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises\\\\nDataPlatformUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetdatasetname()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetenv()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nstatic getsimplefieldpathfromv2fieldpath()\\\\nA helper function to extract simple . path notation from the v2 field path\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nfieldpath (str)\\\\n\\\\nReturn type str\\\\n\\\\n\\\\n\\\\nproperty platform\\\\nDatasetKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.DomainUrn(id, , _allow_coercion=True)\\\\nBases\\\\n\\\\nid (str)\\\\n_allow_coercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITY_TYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod create_from_id(id)\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nid (str)\\\\n\\\\nReturn type\\\\nurn_str (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entity_type\\\\nkey_aspect (DomainKeyClass)\\\\n\\\\nReturn type\\\\nurn_str (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\nto_key_aspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn_url_encoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'glossaryNode\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (GlossaryNodeKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type _SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'glossaryTerm\'\\\\n\\\\n\\\\n\\\\nURN_PARTS\\\\nurn_str (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entity_type\\\\nkey_aspect (GlossaryTermKeyClass)\\\\n\\\\nReturn type\\\\nurn_str (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\nto_key_aspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn_url_encoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'mlFeatureTable\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (MLFeatureTableKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\nproperty platform\\\\nMLFeatureTableKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.MlFeatureUrn(featurenamespace, name, *, allowcoercion=True)\\\\nBases\\\\n\\\\nfeaturenamespace (str)\\\\nname (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 2\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nproperty featurenamespace\\\\nkeyaspect (MLFeatureKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'mlModelDeployment\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nMlModelDeploymentUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'mlModelGroup\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nMlModelGroupUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'mlModel\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nMlModelUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type _SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'mlPrimaryKey\'\\\\n\\\\n\\\\n\\\\nURN_PARTS\\\\nurn_str (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entity_type str\\\\n\\\\n\\\\n\\\\nclassmethod from_key_aspect(key_aspect)\\\\n\\\\nParameters\\\\nMlPrimaryKeyUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod from_string(urn_str)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(_UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty name\\\\nMLPrimaryKeyKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.NotebookUrn(notebook_tool, notebook_id, , allowcoercion=True)\\\\nBases\\\\n\\\\nnotebooktool (str)\\\\nnotebookid (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 2\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nNotebookUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetnotebookid()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\ngetplatformid()\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty notebookid str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type _SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'ownershipType\'\\\\n\\\\n\\\\n\\\\nURN_PARTS\\\\nurn_str (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entity_type\\\\nkey_aspect (OwnershipTypeKeyClass)\\\\n\\\\nReturn type\\\\nurn_str (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\nto_key_aspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn_url_encoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'post\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (PostKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type _SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'query\'\\\\n\\\\n\\\\n\\\\nURN_PARTS\\\\nurn_str (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entity_type\\\\nkey_aspect (QueryKeyClass)\\\\n\\\\nReturn type\\\\nurn_str (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\nto_key_aspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn_url_encoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'role\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (RoleKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'schemaField\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype str\\\\n\\\\n\\\\n\\\\nclassmethod fromkeyaspect(keyaspect)\\\\n\\\\nParameters\\\\nSchemaFieldUrn\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty parent\\\\nSchemaFieldKeyClass\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclass datahub.metadata.urns.TagUrn(name, *, allowcoercion=True)\\\\nBases\\\\n\\\\nname (str)\\\\nallowcoercion (bool)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nENTITYTYPE ClassVar[int] = 1\\\\n\\\\n\\\\n\\\\nclassmethod createfromid(id)\\\\n\\\\nDeprecated since version 0.12.0.2\\\\nid (str)\\\\n\\\\nReturn type\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (TagKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type SpecificUrn\\\\n\\\\nParameters ClassVar[str] = \'test\'\\\\n\\\\n\\\\n\\\\nURNPARTS\\\\nurnstr (str)\\\\n\\\\nReturn type List[str]\\\\n\\\\n\\\\n\\\\nproperty entitytype\\\\nkeyaspect (TestKeyClass)\\\\n\\\\nReturn type\\\\nurnstr (str) \\\\u2013 The string representation of the Urn.\\\\n\\\\nReturn type\\\\nUrn of the given string representation.\\\\n\\\\nRaises str\\\\n\\\\n\\\\n\\\\ntokeyaspect()\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurnurlencoded()\\\\n\\\\nReturn type object\\\\nURNs are globally unique identifiers used to refer to entities.\\\\nIt will be in format of urn&amp;lt;type&amp;gt;li(&amp;lt;id1&amp;gt;,&amp;lt;id2&amp;gt;,\\\\u2026)\\\\nA note on encoding\\\\n\\\\nentitytype (str)\\\\nentityid (List[str])\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nclassmethod createfromstring(urnstr)\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nproperty entityids str\\\\n\\\\n\\\\n\\\\nclassmethod fromstring(urnstr)\\\\nCreates an Urn from its string representation.\\\\n\\\\nParameters\\\\nTypeVar(UrnSelf, bound= Urn)\\\\n\\\\nReturns\\\\nInvalidUrnError \\\\u2013 If the string representation is in invalid format.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\nurn()\\\\nGet the string representation of the urn.\\\\n\\\\nReturn type\\\\nstr\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n\\"}}&gt;","sidebar":"overviewSidebar"},"README":{"id":"README","title":"Introduction","description":"DataHub is a data discovery application built on an extensible metadata platform that helps you tame the complexity of diverse data ecosystems."},"releases":{"id":"releases","title":"DataHub Releases","description":"Summary","sidebar":"overviewSidebar"},"SECURITY":{"id":"SECURITY","title":"Reporting Security Issues","description":"If you think you have found a security vulnerability, please send a report to security@datahubproject.io. This address can be used for all of Acryl Data\u2019s open source and commercial products (including but not limited to DataHub and Acryl Data). We can accept only vulnerability reports at this address.","sidebar":"overviewSidebar"}}}')}}]);</pre></body></html>