Commit 3c2a088: remove absProperties

orenccl committed Dec 16, 2024
1 parent 4418758
Showing 7 changed files with 31 additions and 60 deletions.
@@ -26,7 +26,7 @@
 import javax.annotation.Nonnull;
 import org.apache.gravitino.catalog.hadoop.fs.FileSystemProvider;
 import org.apache.gravitino.catalog.hadoop.fs.FileSystemUtils;
-import org.apache.gravitino.storage.ABSProperties;
+import org.apache.gravitino.storage.AzureProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -49,13 +49,13 @@ public FileSystem getFileSystem(@Nonnull Path path, @Nonnull Map<String, String>
     Map<String, String> hadoopConfMap =
         FileSystemUtils.toHadoopConfigMap(config, ImmutableMap.of());

-    if (config.containsKey(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME)
-        && config.containsKey(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY)) {
+    if (config.containsKey(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME)
+        && config.containsKey(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY)) {
       hadoopConfMap.put(
           String.format(
               "fs.azure.account.key.%s.dfs.core.windows.net",
-              config.get(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME)),
-          config.get(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY));
+              config.get(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME)),
+          config.get(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY));
     }

     if (!config.containsKey(ABFS_IMPL_KEY)) {

This file was deleted.
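
The deleted file is not named in this view. Since the rest of the change swaps `ABSProperties` for `org.apache.gravitino.storage.AzureProperties`, here is a rough sketch of what the replacement constants presumably look like — the class itself is not shown in this diff, so its exact shape is an assumption inferred from the string keys used in the Python test and docs further down:

```java
// Hypothetical sketch of org.apache.gravitino.storage.AzureProperties; the real class may differ.
public final class AzureProperties {
  // Assumed to back the catalog/GVFS property "azure-storage-account-name".
  public static final String GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME = "azure-storage-account-name";
  // Assumed to back the catalog/GVFS property "azure-storage-account-key".
  public static final String GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY = "azure-storage-account-key";

  private AzureProperties() {}
}
```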

@@ -31,7 +31,7 @@
 import org.apache.gravitino.abs.fs.AzureFileSystemProvider;
 import org.apache.gravitino.file.Fileset;
 import org.apache.gravitino.integration.test.util.GravitinoITUtils;
-import org.apache.gravitino.storage.ABSProperties;
+import org.apache.gravitino.storage.AzureProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -113,8 +113,8 @@ protected String defaultBaseLocation() {

   protected void createCatalog() {
     Map<String, String> map = Maps.newHashMap();
-    map.put(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
-    map.put(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
+    map.put(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
+    map.put(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
     map.put(FILESYSTEM_PROVIDERS, AzureFileSystemProvider.ABS_PROVIDER_NAME);
     metalake.createCatalog(catalogName, Catalog.Type.FILESET, provider, "comment", map);

@@ -138,8 +138,8 @@ public void testCreateSchemaAndFilesetWithSpecialLocation() {
             GravitinoITUtils.genRandomName("CatalogCatalogIT"));
     Map<String, String> catalogProps = Maps.newHashMap();
     catalogProps.put("location", ossLocation);
-    catalogProps.put(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
-    catalogProps.put(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
+    catalogProps.put(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
+    catalogProps.put(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
     catalogProps.put(FILESYSTEM_PROVIDERS, AzureFileSystemProvider.ABS_PROVIDER_NAME);

     Catalog localCatalog =
4 changes: 2 additions & 2 deletions clients/client-python/tests/integration/test_gvfs_with_abs.py
@@ -123,8 +123,8 @@ def _init_test_entities(cls):
             comment="",
             properties={
                 "filesystem-providers": "abs",
-                "azure-account-name": cls.azure_abs_account_name,
-                "azure-account-key": cls.azure_abs_account_key,
+                "azure-storage-account-name": cls.azure_abs_account_name,
+                "azure-storage-account-key": cls.azure_abs_account_key,
             },
         )
         catalog.as_schemas().create_schema(
@@ -30,7 +30,7 @@
 import org.apache.gravitino.abs.fs.AzureFileSystemProvider;
 import org.apache.gravitino.catalog.hadoop.fs.FileSystemUtils;
 import org.apache.gravitino.integration.test.util.GravitinoITUtils;
-import org.apache.gravitino.storage.ABSProperties;
+import org.apache.gravitino.storage.AzureProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.Assertions;
@@ -77,8 +77,8 @@ public void startUp() throws Exception {

     Map<String, String> properties = Maps.newHashMap();

-    properties.put(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
-    properties.put(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
+    properties.put(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
+    properties.put(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
     properties.put(FILESYSTEM_PROVIDERS, AzureFileSystemProvider.ABS_PROVIDER_NAME);

     Catalog catalog =
@@ -96,8 +96,8 @@ public void startUp() throws Exception {
     conf.set("fs.gravitino.client.metalake", metalakeName);

     // Pass this configuration to the real file system
-    conf.set(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
-    conf.set(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
+    conf.set(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME, ABS_ACCOUNT_NAME);
+    conf.set(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY, ABS_ACCOUNT_KEY);
     conf.set("fs.abfss.impl", "org.apache.hadoop.fs.azurebfs.SecureAzureBlobFileSystem");
   }

@@ -133,13 +133,13 @@ protected Configuration convertGvfsConfigToRealFileSystemConfig(Configuration gv

     Map<String, String> hadoopConfMap = FileSystemUtils.toHadoopConfigMap(map, ImmutableMap.of());

-    if (gvfsConf.get(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME) != null
-        && gvfsConf.get(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY) != null) {
+    if (gvfsConf.get(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME) != null
+        && gvfsConf.get(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY) != null) {
       hadoopConfMap.put(
           String.format(
               "fs.azure.account.key.%s.dfs.core.windows.net",
-              gvfsConf.get(ABSProperties.GRAVITINO_AZURE_ACCOUNT_NAME)),
-          gvfsConf.get(ABSProperties.GRAVITINO_AZURE_ACCOUNT_KEY));
+              gvfsConf.get(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_NAME)),
+          gvfsConf.get(AzureProperties.GRAVITINO_AZURE_STORAGE_ACCOUNT_KEY));
     }

     hadoopConfMap.forEach(absConf::set);
12 changes: 6 additions & 6 deletions docs/hadoop-catalog.md
@@ -79,12 +79,12 @@ In the meantime, you need to place the corresponding bundle jar [`gravitino-aliy

 #### Azure Blob Storage fileset

-| Configuration item            | Description                                                                                                                                                                                                                                     | Default value   | Required                                  | Since version    |
-|-------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------|-------------------------------------------|------------------|
-| `filesystem-providers`        | The file system providers to add. Set it to `abs` if it's a Azure Blob Storage fileset, or a comma separated string that contains `abs` like `oss,abs,s3` to support multiple kinds of fileset including `abs`.                                | (none)          | Yes                                       | 0.8.0-incubating |
-| `default-filesystem-provider` | The name default filesystem providers of this Hadoop catalog if users do not specify the scheme in the URI. Default value is `builtin-local`, for Azure Blob Storage, if we set this value, we can omit the prefix 'abfss://' in the location. | `builtin-local` | No                                        | 0.8.0-incubating |
-| `azure-account-name`          | The account name of Azure Blob Storage.                                                                                                                                                                                                         | (none)          | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |
-| `azure-account-key`           | The account key of Azure Blob Storage.                                                                                                                                                                                                          | (none)          | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |
+| Configuration item                | Description                                                                                                                                                                                                                                     | Default value   | Required                                  | Since version    |
+|-----------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------|-------------------------------------------|------------------|
+| `filesystem-providers`            | The file system providers to add. Set it to `abs` if it's a Azure Blob Storage fileset, or a comma separated string that contains `abs` like `oss,abs,s3` to support multiple kinds of fileset including `abs`.                                | (none)          | Yes                                       | 0.8.0-incubating |
+| `default-filesystem-provider`     | The name default filesystem providers of this Hadoop catalog if users do not specify the scheme in the URI. Default value is `builtin-local`, for Azure Blob Storage, if we set this value, we can omit the prefix 'abfss://' in the location. | `builtin-local` | No                                        | 0.8.0-incubating |
+| `azure-storage-account-name `     | The account name of Azure Blob Storage.                                                                                                                                                                                                         | (none)          | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |
+| `azure-storage-account-key`       | The account key of Azure Blob Storage.                                                                                                                                                                                                          | (none)          | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |

 Similar to the above, you need to place the corresponding bundle jar [`gravitino-azure-bundle-${version}.jar`](https://repo1.maven.org/maven2/org/apache/gravitino/azure-bundle/) in the directory `${GRAVITINO_HOME}/catalogs/hadoop/libs`.
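
For readers updating catalog definitions against the renamed keys in `docs/hadoop-catalog.md`, here is a minimal sketch of creating an Azure Blob Storage fileset catalog. It mirrors the `metalake.createCatalog(...)` call from the integration test above; the catalog name, the placeholder credentials, and the `hadoop` provider string are illustrative assumptions, not values taken from this commit.

```java
import java.util.HashMap;
import java.util.Map;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.client.GravitinoMetalake;

public class AzureFilesetCatalogExample {
  // Sketch only: assumes an already-loaded GravitinoMetalake handle.
  public static Catalog createAzureFilesetCatalog(GravitinoMetalake metalake) {
    Map<String, String> properties = new HashMap<>();
    properties.put("filesystem-providers", "abs");
    // Renamed keys introduced by this commit (formerly azure-account-name / azure-account-key).
    properties.put("azure-storage-account-name", "<your-storage-account-name>");
    properties.put("azure-storage-account-key", "<your-storage-account-key>");

    // Same call shape as in the integration test above; "hadoop" is the assumed provider.
    return metalake.createCatalog(
        "abs_fileset_catalog", Catalog.Type.FILESET, "hadoop", "comment", properties);
  }
}
```

The only change relative to earlier snippets is the key rename itself: `azure-account-name` / `azure-account-key` become `azure-storage-account-name` / `azure-storage-account-key`.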
8 changes: 4 additions & 4 deletions docs/how-to-use-gvfs.md
@@ -101,10 +101,10 @@ In the meantime, you need to place the corresponding bundle jar [`gravitino-aliy

 #### Azure Blob Storage fileset

-| Configuration item             | Description                             | Default value | Required                                  | Since version    |
-|--------------------------------|-----------------------------------------|---------------|-------------------------------------------|------------------|
-| `azure-account-name`           | The account name of Azure Blob Storage. | (none)        | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |
-| `azure-account-key`            | The account key of Azure Blob Storage.  | (none)        | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |
+| Configuration item                | Description                             | Default value | Required                                  | Since version    |
+|-----------------------------------|-----------------------------------------|---------------|-------------------------------------------|------------------|
+| `azure-storage-account-name`      | The account name of Azure Blob Storage. | (none)        | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |
+| `azure-storage-account-key`       | The account key of Azure Blob Storage.  | (none)        | Yes if it's a Azure Blob Storage fileset. | 0.8.0-incubating |

 Similar to the above, you need to place the corresponding bundle jar [`gravitino-azure-bundle-${version}.jar`](https://repo1.maven.org/maven2/org/apache/gravitino/azure-bundle/) in the Hadoop environment(typically located in `${HADOOP_HOME}/share/hadoop/common/lib/`).
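
On the GVFS side, the renamed keys are passed through a plain Hadoop `Configuration`, as the integration test above does. The sketch below is a minimal illustration under assumptions: the metalake name and credential placeholders are invented, and other GVFS client settings (for example the Gravitino server URI and the fileset path to open) are omitted.

```java
import org.apache.hadoop.conf.Configuration;

public class GvfsAzureConfExample {
  public static Configuration buildGvfsConf() {
    Configuration conf = new Configuration();
    conf.set("fs.gravitino.client.metalake", "metalake_demo"); // assumed metalake name

    // Pass the Azure credentials through to the real file system (renamed keys).
    conf.set("azure-storage-account-name", "<your-storage-account-name>");
    conf.set("azure-storage-account-key", "<your-storage-account-key>");

    // Same ABFS implementation class used in the test above.
    conf.set("fs.abfss.impl", "org.apache.hadoop.fs.azurebfs.SecureAzureBlobFileSystem");
    return conf;
  }
}
```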
