[refactor](properties) Refactor property #46833

Draft · wants to merge 10 commits into base: master
fe/fe-common/src/main/java/org/apache/doris/common/Config.java
@@ -3031,6 +3031,11 @@ public class Config extends ConfigBase {
})
public static boolean enable_feature_data_sync_job = false;

    @ConfField(description = {
            "存放 hadoop conf 配置文件的默认目录。",
            "The default directory for storing hadoop conf configuration files."})
    public static String hadoop_config_dir = EnvUtils.getDorisHome() + "/plugins/hadoop_conf/";

//==========================================================================
// begin of cloud config
//==========================================================================
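Like other FE config entries, the new hadoop_config_dir default can be overridden in fe.conf; a hypothetical override (the path is illustrative):

    hadoop_config_dir = /opt/doris/hadoop_conf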
org/apache/doris/common/ConfigurationUtils.java (new file)
@@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.doris.common;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

import java.io.File;

public class ConfigurationUtils {

    /**
     * Loads the Hadoop configuration files from the specified directory.
     * <p>
     * This method reads a comma-separated list of resource files from the given
     * `resourcesPath`, constructs their absolute paths based on `Config.hadoop_config_dir`,
     * and then loads these files into a Hadoop `Configuration` object.
     *
     * @param resourcesPath The comma-separated list of Hadoop configuration resource files to load.
     *                      This must not be null or empty.
     * @return The Hadoop `Configuration` object with the loaded configuration files.
     * @throws IllegalArgumentException If the provided `resourcesPath` is blank, or if any of the specified
     *                                  configuration files do not exist or are not regular files.
     */
    public static Configuration loadConfigurationFromHadoopConfDir(String resourcesPath) {
        // Reject a blank resource list up front.
        if (StringUtils.isBlank(resourcesPath)) {
            throw new IllegalArgumentException("Hadoop config resource path is empty");
        }

        // Create a new Hadoop Configuration object without loading default resources.
        Configuration conf = new Configuration(false);

        // Iterate over the comma-separated list of resource files.
        for (String resource : resourcesPath.split(",")) {
            // Construct the full path to the resource file under the configured directory.
            String resourcePath = Config.hadoop_config_dir + File.separator + resource.trim();
            File file = new File(resourcePath);

            // Add the file as a configuration resource if it exists and is a regular file;
            // otherwise fail fast with a descriptive error.
            if (file.exists() && file.isFile()) {
                conf.addResource(new Path(file.toURI()));
            } else {
                throw new IllegalArgumentException("Hadoop config resource file does not exist: " + resourcePath);
            }
        }
        // Return the populated Hadoop Configuration object.
        return conf;
    }
}
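A minimal usage sketch of the new helper (the file names are illustrative; the files must exist under Config.hadoop_config_dir or the call throws IllegalArgumentException):

    // Load site files placed under ${DORIS_HOME}/plugins/hadoop_conf/ and read a value back.
    Configuration conf = ConfigurationUtils.loadConfigurationFromHadoopConfDir("core-site.xml,hdfs-site.xml");
    String defaultFs = conf.get("fs.defaultFS");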
3 changes: 3 additions & 0 deletions fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java
@@ -1795,6 +1795,9 @@ public boolean postProcessAfterMetadataReplayed(boolean waitCatalogReady) {
} catch (Throwable t) {
LOG.warn("compatibleMTMV failed", t);
}

// Migrate deprecated catalog resources into catalog properties (see CatalogMgr.removeCatalogResource).
catalogMgr.removeCatalogResource();
return true;
}

fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java
@@ -110,7 +110,7 @@ public CatalogMgr() {
public static CatalogMgr read(DataInput in) throws IOException {
String json = Text.readString(in);
if (LOG.isDebugEnabled()) {
LOG.debug("debug: read json: {}", json);
LOG.debug("Reading catalog configuration from JSON: {}", json);
}
return GsonUtils.GSON.fromJson(json, CatalogMgr.class);
}
@@ -137,7 +137,7 @@ private void addCatalog(CatalogIf catalog) {

private CatalogIf removeCatalog(long catalogId) {
CatalogIf catalog = idToCatalog.remove(catalogId);
-        LOG.info("Removed catalog with id {}, name {}", catalogId, catalog == null ? "N/A" : catalog.getName());
+        LOG.info("Removed catalog [id: {}, name: {}]", catalogId, catalog == null ? "N/A" : catalog.getName());
if (catalog != null) {
catalog.onClose();
nameToCatalog.remove(catalog.getName());
@@ -243,7 +243,7 @@ private void createCatalogImpl(CatalogIf catalog, String catalogName,
try {
if (nameToCatalog.containsKey(catalog.getName())) {
if (ifNotExists) {
LOG.warn("Catalog {} is already exist.", catalogName);
LOG.warn("Catalog '{}' already exists", catalogName);
return;
}
throw new DdlException("Catalog had already exist with name: " + catalogName);
@@ -280,7 +280,7 @@ public void dropCatalog(String catalogName, boolean ifExists) throws UserExcepti
writeLock();
try {
if (ifExists && !nameToCatalog.containsKey(catalogName)) {
-            LOG.warn("Non catalog {} is found.", catalogName);
+            LOG.warn("No catalog found with name '{}'", catalogName);
return;
}
CatalogIf<DatabaseIf<TableIf>> catalog = nameToCatalog.get(catalogName);
@@ -815,7 +815,7 @@ public void addExternalPartitions(String catalogName, String dbName, String tabl
return;
}
if (!(table instanceof HMSExternalTable)) {
-            LOG.warn("only support HMSTable");
+            LOG.warn("Operation only supported for HMS (Hive Metastore) tables");
return;
}

@@ -895,4 +895,17 @@ public Map<Long, CatalogIf<? extends DatabaseIf<? extends TableIf>>> getIdToCata
public Set<CatalogIf> getCopyOfCatalog() {
return new HashSet<>(idToCatalog.values());
}

    // In version 2.1.9, resources are no longer supported in catalogs.
    // Here we do the following things:
    // 1. if a catalog references a resource, copy all resource properties into the catalog properties.
    // 2. remove the resource reference from the catalog property.
    public void removeCatalogResource() {
        for (CatalogIf catalog : idToCatalog.values()) {
            if (!(catalog instanceof ExternalCatalog)) {
                continue;
            }
            ((ExternalCatalog) catalog).removeResource();
        }
    }
}
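For illustration, a hedged sketch of what this replay-time migration does for one catalog (catalog, resource, and property names below are invented for the example):

    // Before replay: catalog 'hive_prod' has properties {"type" = "hms"} and references
    // resource 's3_res' whose properties are {"s3.access_key" = "ak", "s3.secret_key" = "sk"}.
    // After catalogMgr.removeCatalogResource():
    //   - 'hive_prod' properties become {"type" = "hms", "s3.access_key" = "ak", "s3.secret_key" = "sk"}
    //   - the catalog's resource field is cleared, and the CATALOG reference on 's3_res' is removed.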
fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogProperty.java
@@ -19,9 +19,12 @@

import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.Resource;
import org.apache.doris.catalog.Resource.ReferenceType;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.io.Writable;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.datasource.property.metastore.MetastoreProperties;
import org.apache.doris.datasource.property.storage.StorageProperties;
import org.apache.doris.persist.gson.GsonUtils;

import com.google.common.base.Strings;
@@ -35,6 +38,7 @@
import java.io.DataOutput;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
@@ -45,53 +49,35 @@
public class CatalogProperty implements Writable {
private static final Logger LOG = LogManager.getLogger(CatalogProperty.class);

+    @Deprecated
     @SerializedName(value = "resource")
     private String resource;
     @SerializedName(value = "properties")
     private Map<String, String> properties;

-    private volatile Resource catalogResource = null;
+    private MetastoreProperties metastoreProperties;
+    private List<StorageProperties> storagePropertiesList;

     public CatalogProperty(String resource, Map<String, String> properties) {
         this.resource = Strings.nullToEmpty(resource);
         this.properties = properties;
         if (this.properties == null) {
             this.properties = Maps.newConcurrentMap();
         }
+        metastoreProperties = MetastoreProperties.create(this.properties);
+        storagePropertiesList = StorageProperties.create(this.properties);
     }

-    private Resource catalogResource() {
-        if (!Strings.isNullOrEmpty(resource) && catalogResource == null) {
-            synchronized (this) {
-                if (catalogResource == null) {
-                    catalogResource = Env.getCurrentEnv().getResourceMgr().getResource(resource);
-                }
-            }
-        }
-        return catalogResource;
-    }
-
-    public String getOrDefault(String key, String defaultVal) {
-        String val = properties.get(key);
-        if (val == null) {
-            Resource res = catalogResource();
-            if (res != null) {
-                val = res.getCopiedProperties().getOrDefault(key, defaultVal);
-            } else {
-                val = defaultVal;
-            }
-        }
-        return val;
-    }
+    public String getOrDefault(String key, String defaultVal) {
+        return properties.getOrDefault(key, defaultVal);
+    }
+
+    public boolean containsProperty(String key) {
+        return properties.containsKey(key);
+    }

public Map<String, String> getProperties() {
Map<String, String> mergedProperties = Maps.newHashMap();
-        if (!Strings.isNullOrEmpty(resource)) {
-            Resource res = catalogResource();
-            if (res != null) {
-                mergedProperties = res.getCopiedProperties();
-            }
-        }
mergedProperties.putAll(properties);
return mergedProperties;
}
@@ -119,6 +105,22 @@ public void deleteProperty(String key) {
this.properties.remove(key);
}

    public void removeResource(String catalogName) {
        if (Strings.isNullOrEmpty(this.resource)) {
            return;
        }
        Resource resourceObj = Env.getCurrentEnv().getResourceMgr().getResource(this.resource);
        if (resourceObj != null) {
            Map<String, String> resourceProperties = resourceObj.getCopiedProperties();
            for (Map.Entry<String, String> entry : resourceProperties.entrySet()) {
                // On key conflicts, keep the catalog-level value, matching the old lookup
                // order in which catalog properties took precedence over resource properties.
                properties.putIfAbsent(entry.getKey(), entry.getValue());
            }
            this.resource = "";
            resourceObj.removeReference(catalogName, ReferenceType.CATALOG);
            LOG.warn("Removed resource '{}' from catalog '{}'", resourceObj.getName(), catalogName);
        }
    }

@Override
public void write(DataOutput out) throws IOException {
Text.writeString(out, GsonUtils.GSON.toJson(this));
fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
@@ -318,24 +318,17 @@ public boolean isInitialized() {
// check if all required properties are set when creating catalog
public void checkProperties() throws DdlException {
// check refresh parameter of catalog
-        Map<String, String> properties = getCatalogProperty().getProperties();
-        if (properties.containsKey(CatalogMgr.METADATA_REFRESH_INTERVAL_SEC)) {
+        if (catalogProperty.containsProperty(CatalogMgr.METADATA_REFRESH_INTERVAL_SEC)) {
            try {
                Integer metadataRefreshIntervalSec = Integer.valueOf(
-                        properties.get(CatalogMgr.METADATA_REFRESH_INTERVAL_SEC));
+                        catalogProperty.getOrDefault(CatalogMgr.METADATA_REFRESH_INTERVAL_SEC, "-1"));
if (metadataRefreshIntervalSec < 0) {
throw new DdlException("Invalid properties: " + CatalogMgr.METADATA_REFRESH_INTERVAL_SEC);
}
} catch (NumberFormatException e) {
throw new DdlException("Invalid properties: " + CatalogMgr.METADATA_REFRESH_INTERVAL_SEC);
}
}

-        // if (properties.getOrDefault(ExternalCatalog.USE_META_CACHE, "true").equals("false")) {
-        //     LOG.warn("force to set use_meta_cache to true for catalog: {} when creating", name);
-        //     getCatalogProperty().addProperty(ExternalCatalog.USE_META_CACHE, "true");
-        //     useMetaCache = Optional.of(true);
-        // }
}

/**
Expand Down Expand Up @@ -1031,7 +1024,7 @@ public String getMetaNamesMapping() {
}

public String bindBrokerName() {
-        return catalogProperty.getProperties().get(HMSExternalCatalog.BIND_BROKER_NAME);
+        return catalogProperty.getOrDefault(HMSExternalCatalog.BIND_BROKER_NAME, "");
}

// ATTN: this method only return all cached databases.
@@ -1058,13 +1051,8 @@ public Collection<DatabaseIf<? extends TableIf>> getAllDbs() {
public boolean enableAutoAnalyze() {
// By default, external catalog disables auto analyze, users could set catalog property to enable it:
// "enable.auto.analyze" = "true"
-        Map<String, String> properties = catalogProperty.getProperties();
-        boolean ret = false;
-        if (properties.containsKey(ENABLE_AUTO_ANALYZE)
-                && properties.get(ENABLE_AUTO_ANALYZE).equalsIgnoreCase("true")) {
-            ret = true;
-        }
-        return ret;
+        return catalogProperty.getOrDefault(ENABLE_AUTO_ANALYZE, "false")
+                .equalsIgnoreCase("true");
}

@Override
@@ -1100,4 +1088,8 @@ public void setAutoAnalyzePolicy(String dbName, String tableName, String policy)
public PreExecutionAuthenticator getPreExecutionAuthenticator() {
return preExecutionAuthenticator;
}

    public void removeResource() {
        catalogProperty.removeResource(this.name);
    }
}
@@ -153,7 +153,7 @@ protected List<String> listDatabaseNames() {
public void checkProperties() throws DdlException {
super.checkProperties();
for (String requiredProperty : REQUIRED_PROPERTIES) {
-            if (!catalogProperty.getProperties().containsKey(requiredProperty)) {
+            if (!catalogProperty.containsProperty(requiredProperty)) {
throw new DdlException("Required property '" + requiredProperty + "' is missing");
}
}
@@ -117,6 +117,7 @@ public void checkProperties() throws DdlException {
throw new DdlException(
"The parameter " + FILE_META_CACHE_TTL_SECOND + " is wrong, value is " + fileMetaCacheTtlSecond);
}

// check the dfs.ha properties
// 'dfs.nameservices'='your-nameservice',
// 'dfs.ha.namenodes.your-nameservice'='nn1,nn2',
@@ -67,7 +67,6 @@ private Configuration replaceS3Properties(Configuration conf) {
}

private Map<String, String> convertToRestCatalogProperties() {

Map<String, String> props = catalogProperty.getProperties();
Map<String, String> restProperties = new HashMap<>(props);
restProperties.put(CatalogUtil.ICEBERG_CATALOG_TYPE, CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/JdbcExternalCatalog.java
@@ -93,7 +93,7 @@ public JdbcExternalCatalog(long catalogId, String name, String resource, Map<Str
public void checkProperties() throws DdlException {
super.checkProperties();
for (String requiredProperty : REQUIRED_PROPERTIES) {
-            if (!catalogProperty.getProperties().containsKey(requiredProperty)) {
+            if (!catalogProperty.containsProperty(requiredProperty)) {
throw new DdlException("Required property '" + requiredProperty + "' is missing");
}
}
@@ -113,12 +113,12 @@ public void checkProperties() throws DdlException {
public void setDefaultPropsIfMissing(boolean isReplay) {
super.setDefaultPropsIfMissing(isReplay);
// Modify lower_case_table_names to lower_case_meta_names if it exists
-        if (catalogProperty.getProperties().containsKey("lower_case_table_names") && isReplay) {
-            String lowerCaseTableNamesValue = catalogProperty.getProperties().get("lower_case_table_names");
+        if (catalogProperty.containsProperty("lower_case_table_names") && isReplay) {
+            String lowerCaseTableNamesValue = catalogProperty.getOrDefault("lower_case_table_names", "");
catalogProperty.addProperty("lower_case_meta_names", lowerCaseTableNamesValue);
catalogProperty.deleteProperty("lower_case_table_names");
LOG.info("Modify lower_case_table_names to lower_case_meta_names, value: {}", lowerCaseTableNamesValue);
-        } else if (catalogProperty.getProperties().containsKey("lower_case_table_names") && !isReplay) {
+        } else if (catalogProperty.containsProperty("lower_case_table_names") && !isReplay) {
throw new IllegalArgumentException("Jdbc catalog property lower_case_table_names is not supported,"
+ " please use lower_case_meta_names instead.");
}
@@ -300,11 +300,11 @@ public List<Column> listColumns(String remoteDbName, String remoteTblName) {
@Override
public void checkWhenCreating() throws DdlException {
super.checkWhenCreating();
-        Map<String, String> properties = catalogProperty.getProperties();
-        if (properties.containsKey(JdbcResource.DRIVER_URL)) {
-            String computedChecksum = JdbcResource.computeObjectChecksum(properties.get(JdbcResource.DRIVER_URL));
-            if (properties.containsKey(JdbcResource.CHECK_SUM)) {
-                String providedChecksum = properties.get(JdbcResource.CHECK_SUM);
+        if (catalogProperty.containsProperty(JdbcResource.DRIVER_URL)) {
+            String computedChecksum = JdbcResource.computeObjectChecksum(
+                    catalogProperty.getOrDefault(JdbcResource.DRIVER_URL, ""));
+            if (catalogProperty.containsProperty(JdbcResource.CHECK_SUM)) {
+                String providedChecksum = catalogProperty.getOrDefault(JdbcResource.CHECK_SUM, "");
if (!providedChecksum.equals(computedChecksum)) {
throw new DdlException(
"The provided checksum (" + providedChecksum