diff --git a/nifi-assembly/pom.xml b/nifi-assembly/pom.xml
index 8aaf2463910d..3a3c092e3ef4 100644
--- a/nifi-assembly/pom.xml
+++ b/nifi-assembly/pom.xml
@@ -363,6 +363,12 @@ language governing permissions and limitations under the License. -->
             <version>2.2.0-SNAPSHOT</version>
             <type>nar</type>
         </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-database-dialect-service-nar</artifactId>
+            <version>2.2.0-SNAPSHOT</version>
+            <type>nar</type>
+        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-mongodb-client-service-api-nar</artifactId>
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/pom.xml b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/pom.xml
index 19435d2fb44a..9e4d328774ce 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/pom.xml
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/pom.xml
@@ -31,6 +31,10 @@
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-dbcp-service-api</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.nifi</groupId>
+            <artifactId>nifi-database-dialect-service-api</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-web-client-provider-api</artifactId>
diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/main/java/org/apache/nifi/parameter/DatabaseParameterProvider.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/main/java/org/apache/nifi/parameter/DatabaseParameterProvider.java
index c2f429316f9c..e8c692e52944 100644
--- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/main/java/org/apache/nifi/parameter/DatabaseParameterProvider.java
+++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-parameter-providers/src/main/java/org/apache/nifi/parameter/DatabaseParameterProvider.java
@@ -21,12 +21,24 @@
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.ConfigVerificationResult;
 import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.context.PropertyContext;
 import org.apache.nifi.controller.ConfigurationContext;
+import org.apache.nifi.database.dialect.service.api.ColumnDefinition;
+import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition;
+import org.apache.nifi.database.dialect.service.api.DatabaseDialectService;
+import org.apache.nifi.database.dialect.service.api.QueryClause;
+import org.apache.nifi.database.dialect.service.api.QueryClauseType;
+import org.apache.nifi.database.dialect.service.api.QueryStatementRequest;
+import org.apache.nifi.database.dialect.service.api.StandardQueryStatementRequest;
+import org.apache.nifi.database.dialect.service.api.StatementResponse;
+import org.apache.nifi.database.dialect.service.api.StatementType;
+import org.apache.nifi.database.dialect.service.api.TableDefinition;
 import org.apache.nifi.dbcp.DBCPService;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.processors.standard.db.DatabaseAdapter;
-import org.apache.nifi.util.StringUtils;
+import org.apache.nifi.processors.standard.db.DatabaseAdapterDescriptor;
+import org.apache.nifi.processors.standard.db.impl.DatabaseAdapterDatabaseDialectService;
+import org.apache.nifi.processors.standard.db.impl.DatabaseDialectServiceDatabaseAdapter;
 
 import java.sql.Connection;
 import java.sql.ResultSet;
@@ -38,7 +50,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.ServiceLoader;
+import java.util.Optional;
 import java.util.stream.Collectors;
 
 @Tags({"database", "dbcp", "sql"})
@@ -46,29 +58,9 @@ public class DatabaseParameterProvider extends AbstractParameterProvider implements VerifiableParameterProvider {
-    protected final static Map<String, DatabaseAdapter> dbAdapters = new HashMap<>();
-
-    public static final PropertyDescriptor DB_TYPE;
-
-    static {
-        // Load the DatabaseAdapters
-        ArrayList<AllowableValue> dbAdapterValues = new ArrayList<>();
-        ServiceLoader<DatabaseAdapter> dbAdapterLoader = ServiceLoader.load(DatabaseAdapter.class);
-        dbAdapterLoader.forEach(it -> {
-            dbAdapters.put(it.getName(), it);
-            dbAdapterValues.add(new AllowableValue(it.getName(), it.getName(), it.getDescription()));
-        });
-
-        DB_TYPE = new PropertyDescriptor.Builder()
-            .name("db-type")
-            .displayName("Database Type")
-            .description("The type/flavor of database, used for generating database-specific code. In many cases the Generic type "
-                + "should suffice, but some databases (such as Oracle) require custom SQL clauses. ")
-            .allowableValues(dbAdapterValues.toArray(new AllowableValue[dbAdapterValues.size()]))
-            .defaultValue("Generic")
-            .required(true)
-            .build();
-    }
+    public static final PropertyDescriptor DB_TYPE = DatabaseAdapterDescriptor.getDatabaseTypeDescriptor("db-type");
+
+    public static final PropertyDescriptor DATABASE_DIALECT_SERVICE = DatabaseAdapterDescriptor.getDatabaseDialectServiceDescriptor(DB_TYPE);
 
     static AllowableValue GROUPING_BY_COLUMN = new AllowableValue("grouping-by-column", "Column",
             "A single table is partitioned by the 'Parameter Group Name Column'. All rows with the same value in this column will " +
@@ -149,6 +141,7 @@ public class DatabaseParameterProvider extends AbstractParameterProvider impleme
     protected void init(final ParameterProviderInitializationContext config) {
         final List<PropertyDescriptor> properties = new ArrayList<>();
         properties.add(DB_TYPE);
+        properties.add(DATABASE_DIALECT_SERVICE);
         properties.add(DBCP_SERVICE);
         properties.add(PARAMETER_GROUPING_STRATEGY);
         properties.add(TABLE_NAME);
@@ -233,8 +226,22 @@ private void validateValueNotNull(final String value, final String columnName) {
     }
 
     String getQuery(final ConfigurationContext context, final String tableName, final List<String> columns, final String whereClause) {
-        final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue());
-        return dbAdapter.getSelectStatement(tableName, StringUtils.join(columns, ", "), whereClause, null, null, null);
+        final DatabaseDialectService databaseDialectService = getDatabaseDialectService(context);
+
+        final List<ColumnDefinition> columnDefinitions = columns.stream()
+                .map(StandardColumnDefinition::new)
+                .map(ColumnDefinition.class::cast)
+                .toList();
+        final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, columnDefinitions);
+        final QueryStatementRequest queryStatementRequest = new StandardQueryStatementRequest(
+                StatementType.SELECT,
+                tableDefinition,
+                Optional.empty(),
+                List.of(new QueryClause(QueryClauseType.WHERE, whereClause)),
+                Optional.empty()
+        );
+        final StatementResponse statementResponse = databaseDialectService.getStatement(queryStatementRequest);
+        return statementResponse.sql();
     }
 
     @Override
@@ -262,4 +269,15 @@ public List<ConfigVerificationResult> verify(final ConfigurationContext context,
         return results;
     }
+
+    private DatabaseDialectService getDatabaseDialectService(final PropertyContext context) {
+        final DatabaseDialectService databaseDialectService;
+        final String databaseType = context.getProperty(DB_TYPE).getValue();
+        if (DatabaseDialectServiceDatabaseAdapter.NAME.equals(databaseType)) {
+            databaseDialectService = context.getProperty(DATABASE_DIALECT_SERVICE).asControllerService(DatabaseDialectService.class);
+        } else {
+            databaseDialectService = new
DatabaseAdapterDatabaseDialectService(databaseType); + } + return databaseDialectService; + } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/pom.xml b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/pom.xml index ebdda2e7799e..77f2bac61dba 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/pom.xml +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/pom.xml @@ -390,6 +390,10 @@ nifi-dbcp-service-api 2.2.0-SNAPSHOT + + org.apache.nifi + nifi-database-dialect-service-api + com.squareup.okhttp3 okhttp diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java index 2e01ee1e74f9..c1d607698297 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java @@ -16,10 +16,20 @@ */ package org.apache.nifi.processors.standard; -import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.context.PropertyContext; +import org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.database.dialect.service.api.QueryClause; +import org.apache.nifi.database.dialect.service.api.QueryClauseType; +import org.apache.nifi.database.dialect.service.api.QueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StandardQueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; @@ -29,7 +39,9 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; +import org.apache.nifi.processors.standard.db.DatabaseAdapterDescriptor; +import org.apache.nifi.processors.standard.db.impl.DatabaseAdapterDatabaseDialectService; +import org.apache.nifi.processors.standard.db.impl.DatabaseDialectServiceDatabaseAdapter; import org.apache.nifi.processors.standard.db.impl.PhoenixDatabaseAdapter; import org.apache.nifi.util.StringUtils; @@ -50,13 +62,14 @@ import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.ServiceLoader; +import java.util.Optional; import java.util.Set; import 
java.util.concurrent.atomic.AtomicBoolean; @@ -183,9 +196,9 @@ public abstract class AbstractDatabaseFetchProcessor extends AbstractSessionFact // The delimiter to use when referencing qualified names (such as table@!@column in the state map) protected static final String NAMESPACE_DELIMITER = "@!@"; - public static final PropertyDescriptor DB_TYPE; + public static final PropertyDescriptor DB_TYPE = DatabaseAdapterDescriptor.getDatabaseTypeDescriptor("db-fetch-db-type"); + static final PropertyDescriptor DATABASE_DIALECT_SERVICE = DatabaseAdapterDescriptor.getDatabaseDialectServiceDescriptor(DB_TYPE); - protected final static Map dbAdapters = new HashMap<>(); protected final Map columnTypeMap = new HashMap<>(); // This value is set when the processor is scheduled and indicates whether the Table Name property contains Expression Language. @@ -204,29 +217,11 @@ public abstract class AbstractDatabaseFetchProcessor extends AbstractSessionFact private static final DateTimeFormatter TIME_TYPE_FORMAT = DateTimeFormatter.ofPattern("HH:mm:ss.SSS"); + private static final QueryClause ZERO_RESULT_WHERE_CLAUSE = new QueryClause(QueryClauseType.WHERE, "1 = 0"); + // A Map (name to value) of initial maximum-value properties, filled at schedule-time and used at trigger-time protected Map maxValueProperties; - static { - // Load the DatabaseAdapters - ArrayList dbAdapterValues = new ArrayList<>(); - ServiceLoader dbAdapterLoader = ServiceLoader.load(DatabaseAdapter.class); - dbAdapterLoader.forEach(it -> { - dbAdapters.put(it.getName(), it); - dbAdapterValues.add(new AllowableValue(it.getName(), it.getName(), it.getDescription())); - }); - - DB_TYPE = new PropertyDescriptor.Builder() - .name("db-fetch-db-type") - .displayName("Database Type") - .description("The type/flavor of database, used for generating database-specific code. In many cases the Generic type " - + "should suffice, but some databases (such as Oracle) require custom SQL clauses. ") - .allowableValues(dbAdapterValues.toArray(new AllowableValue[dbAdapterValues.size()])) - .defaultValue("Generic") - .required(true) - .build(); - } - // A common validation procedure for DB fetch processors, it stores whether the Table Name and/or Max Value Column properties have expression language protected Collection customValidate(ValidationContext validationContext) { // For backwards-compatibility, keep track of whether the table name and max-value column properties are dynamic (i.e. has expression language) @@ -241,6 +236,8 @@ public void setup(final ProcessContext context) { } public void setup(final ProcessContext context, boolean shouldCleanCache, FlowFile flowFile) { + final DatabaseDialectService databaseDialectService = getDatabaseDialectService(context); + synchronized (setupComplete) { setupComplete.set(false); final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES).evaluateAttributeExpressions(flowFile).getValue(); @@ -256,23 +253,15 @@ public void setup(final ProcessContext context, boolean shouldCleanCache, FlowFi final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue(); final String sqlQuery = context.getProperty(SQL_QUERY).evaluateAttributeExpressions().getValue(); - final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue()); try (final Connection con = dbcpService.getConnection(flowFile == null ? 
Collections.emptyMap() : flowFile.getAttributes()); final Statement st = con.createStatement()) { // Try a query that returns no rows, for the purposes of getting metadata about the columns. It is possible // to use DatabaseMetaData.getColumns(), but not all drivers support this, notably the schema-on-read // approach as in Apache Drill - String query; - - if (StringUtils.isEmpty(sqlQuery)) { - query = dbAdapter.getSelectStatement(tableName, maxValueColumnNames, "1 = 0", null, null, null); - } else { - StringBuilder sbQuery = getWrappedQuery(dbAdapter, sqlQuery, tableName); - sbQuery.append(" WHERE 1=0"); - - query = sbQuery.toString(); - } + final QueryStatementRequest statementRequest = getMaxValueStatementRequest(tableName, maxValueColumnNames, sqlQuery); + final StatementResponse statementResponse = databaseDialectService.getStatement(statementRequest); + final String query = statementResponse.sql(); ResultSet resultSet = st.executeQuery(query); ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); @@ -286,13 +275,13 @@ public void setup(final ProcessContext context, boolean shouldCleanCache, FlowFi final List maxValueQualifiedColumnNameList = new ArrayList<>(); for (String maxValueColumn : maxValueColumnNameList) { - String colKey = getStateKey(tableName, maxValueColumn.trim(), dbAdapter); + String colKey = getStateKey(tableName, maxValueColumn.trim()); maxValueQualifiedColumnNameList.add(colKey); } for (int i = 1; i <= numCols; i++) { String colName = resultSetMetaData.getColumnName(i).toLowerCase(); - String colKey = getStateKey(tableName, colName, dbAdapter); + String colKey = getStateKey(tableName, colName); //only include columns that are part of the maximum value tracking column list if (!maxValueQualifiedColumnNameList.contains(colKey)) { @@ -304,7 +293,7 @@ public void setup(final ProcessContext context, boolean shouldCleanCache, FlowFi } for (String maxValueColumn : maxValueColumnNameList) { - String colKey = getStateKey(tableName, maxValueColumn.trim().toLowerCase(), dbAdapter); + String colKey = getStateKey(tableName, maxValueColumn.trim().toLowerCase()); if (!columnTypeMap.containsKey(colKey)) { throw new ProcessException("Column not found in the table/query specified: " + maxValueColumn); } @@ -320,15 +309,37 @@ public void setup(final ProcessContext context, boolean shouldCleanCache, FlowFi } } - protected static StringBuilder getWrappedQuery(DatabaseAdapter dbAdapter, String sqlQuery, String tableName) { - return new StringBuilder("SELECT * FROM (" + sqlQuery + ") " + dbAdapter.getTableAliasClause(tableName)); + protected DatabaseDialectService getDatabaseDialectService(final PropertyContext context) { + final DatabaseDialectService databaseDialectService; + final String databaseType = context.getProperty(DB_TYPE).getValue(); + if (DatabaseDialectServiceDatabaseAdapter.NAME.equals(databaseType)) { + databaseDialectService = context.getProperty(DATABASE_DIALECT_SERVICE).asControllerService(DatabaseDialectService.class); + } else { + databaseDialectService = new DatabaseAdapterDatabaseDialectService(databaseType); + } + return databaseDialectService; + } + + private QueryStatementRequest getMaxValueStatementRequest(final String tableName, final String maxValueColumnNames, final String derivedTableQuery) { + final List maxValueColumns = Arrays.stream(maxValueColumnNames.split(",")) + .map(StandardColumnDefinition::new) + .map(ColumnDefinition.class::cast) + .toList(); + + final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), 
Optional.empty(), tableName, maxValueColumns); + return new StandardQueryStatementRequest( + StatementType.SELECT, + tableDefinition, + Optional.ofNullable(derivedTableQuery), + List.of(ZERO_RESULT_WHERE_CLAUSE), + Optional.empty() + ); } protected static String getMaxValueFromRow(ResultSet resultSet, int columnIndex, Integer type, - String maxValueString, - String databaseType) + String maxValueString) throws ParseException, IOException, SQLException { // Skip any columns we're not keeping track of or whose value is null @@ -520,17 +531,18 @@ protected static String getLiteralByType(int type, String value, String database * Construct a key string for a corresponding state value. * @param prefix A prefix may contain database and table name, or just table name, this can be null * @param columnName A column name - * @param adapter DatabaseAdapter is used to unwrap identifiers * @return a state key string */ - protected static String getStateKey(String prefix, String columnName, DatabaseAdapter adapter) { + protected static String getStateKey(String prefix, String columnName) { StringBuilder sb = new StringBuilder(); if (prefix != null) { - sb.append(adapter.unwrapIdentifier(prefix.toLowerCase())); + final String prefixUnwrapped = prefix.toLowerCase().replaceAll("[\"`\\[\\]]", ""); + sb.append(prefixUnwrapped); sb.append(NAMESPACE_DELIMITER); } if (columnName != null) { - sb.append(adapter.unwrapIdentifier(columnName.toLowerCase())); + final String columnNameUnwrapped = columnName.toLowerCase().replaceAll("[\"`\\[\\]]", ""); + sb.append(columnNameUnwrapped); } return sb.toString(); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java index d005771d5b4d..edb6734b0318 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java @@ -25,6 +25,14 @@ import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.components.state.StateMap; +import org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.database.dialect.service.api.QueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StandardQueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.expression.AttributeExpression; import org.apache.nifi.expression.ExpressionLanguageScope; @@ -36,7 +44,6 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; import org.apache.nifi.processors.standard.sql.SqlWriter; import org.apache.nifi.util.StopWatch; import 
org.apache.nifi.util.db.JdbcCommon; @@ -56,14 +63,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; import java.util.stream.IntStream; - public abstract class AbstractQueryDatabaseTable extends AbstractDatabaseFetchProcessor { public static final String RESULT_TABLENAME = "tablename"; @@ -214,23 +220,6 @@ protected Collection customValidate(ValidationContext validati .build()); } - final Boolean propertyAutoCommit = validationContext.getProperty(AUTO_COMMIT).evaluateAttributeExpressions().asBoolean(); - final Integer fetchSize = validationContext.getProperty(FETCH_SIZE).evaluateAttributeExpressions().asInteger(); - final DatabaseAdapter dbAdapter = dbAdapters.get(validationContext.getProperty(DB_TYPE).getValue()); - final Boolean adapterAutoCommit = dbAdapter == null - ? null - : dbAdapter.getAutoCommitForReads(fetchSize).orElse(null); - if (adapterAutoCommit != null && propertyAutoCommit != null - && propertyAutoCommit != adapterAutoCommit ) { - results.add(new ValidationResult.Builder().valid(false) - .subject(AUTO_COMMIT.getDisplayName()) - .input(String.valueOf(propertyAutoCommit)) - .explanation(String.format("'%s' must be set to '%s' because '%s' %s requires it to be '%s'", - AUTO_COMMIT.getDisplayName(), adapterAutoCommit, - dbAdapter.getName(), DB_TYPE.getDisplayName(), adapterAutoCommit)) - .build()); - } - return results; } @@ -258,7 +247,8 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory final ComponentLog logger = getLogger(); final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class); - final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue()); + final DatabaseDialectService databaseDialectService = getDatabaseDialectService(context); + final String databaseType = context.getProperty(DB_TYPE).getValue(); final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions().getValue(); final String columnNames = context.getProperty(COLUMN_NAMES).evaluateAttributeExpressions().getValue(); final String sqlQuery = context.getProperty(SQL_QUERY).evaluateAttributeExpressions().getValue(); @@ -296,7 +286,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory //If an initial max value for column(s) has been specified using properties, and this column is not in the state manager, sync them to the state property map for (final Map.Entry maxProp : maxValueProperties.entrySet()) { String maxPropKey = maxProp.getKey().toLowerCase(); - String fullyQualifiedMaxPropKey = getStateKey(tableName, maxPropKey, dbAdapter); + String fullyQualifiedMaxPropKey = getStateKey(tableName, maxPropKey); if (!statePropertyMap.containsKey(fullyQualifiedMaxPropKey)) { String newMaxPropValue; // If we can't find the value at the fully-qualified key name, it is possible (under a previous scheme) @@ -317,11 +307,15 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*")); if (maxValueColumnNameList != null && statePropertyMap.isEmpty() && initialLoadStrategy.equals(INITIAL_LOAD_STRATEGY_NEW_ROWS.getValue())) { - final String columnsClause = maxValueColumnNameList.stream() + final List maxValueColumnDefinitions = maxValueColumnNameList.stream() .map(columnName 
-> String.format("MAX(%s) %s", columnName, columnName)) - .collect(Collectors.joining(", ")); - - final String selectMaxQuery = dbAdapter.getSelectStatement(tableName, columnsClause, null, null, null, null); + .map(StandardColumnDefinition::new) + .map(ColumnDefinition.class::cast) + .toList(); + final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, maxValueColumnDefinitions); + final QueryStatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition); + final StatementResponse maxValueStatementResponse = databaseDialectService.getStatement(statementRequest); + final String selectMaxQuery = maxValueStatementResponse.sql(); try (final Connection con = dbcpService.getConnection(Collections.emptyMap()); final Statement st = con.createStatement()) { @@ -334,7 +328,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory try (final ResultSet resultSet = st.executeQuery(selectMaxQuery)) { if (resultSet.next()) { - final MaxValueResultSetRowCollector maxValCollector = new MaxValueResultSetRowCollector(tableName, statePropertyMap, dbAdapter); + final MaxValueResultSetRowCollector maxValCollector = new MaxValueResultSetRowCollector(tableName, statePropertyMap); maxValCollector.processRow(resultSet); maxValCollector.applyStateChanges(); } @@ -345,7 +339,14 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory } } - final String selectQuery = getQuery(dbAdapter, tableName, sqlQuery, columnNames, maxValueColumnNameList, customWhereClause, statePropertyMap); + final List parsedColumnNames; + if (columnNames == null) { + parsedColumnNames = List.of(); + } else { + parsedColumnNames = Arrays.asList(columnNames.split(", ")); + } + + final String selectQuery = getQuery(databaseDialectService, databaseType, tableName, sqlQuery, parsedColumnNames, maxValueColumnNameList, customWhereClause, statePropertyMap); final StopWatch stopWatch = new StopWatch(true); final String fragmentIdentifier = UUID.randomUUID().toString(); @@ -381,12 +382,8 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory } final boolean originalAutoCommit = con.getAutoCommit(); - final Boolean propertyAutoCommitValue = context.getProperty(AUTO_COMMIT).evaluateAttributeExpressions().asBoolean(); + final Boolean setAutoCommitValue = context.getProperty(AUTO_COMMIT).evaluateAttributeExpressions().asBoolean(); // If user sets AUTO_COMMIT property to non-null (i.e. true or false), then the property value overrides the dbAdapter's value - final Boolean setAutoCommitValue = - dbAdapter == null || propertyAutoCommitValue != null - ? 
propertyAutoCommitValue - : dbAdapter.getAutoCommitForReads(fetchSize).orElse(null); if (setAutoCommitValue != null && originalAutoCommit != setAutoCommitValue) { try { con.setAutoCommit(setAutoCommitValue); @@ -400,7 +397,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory try (final ResultSet resultSet = st.executeQuery(selectQuery)) { int fragmentIndex = 0; // Max values will be updated in the state property map by the callback - final MaxValueResultSetRowCollector maxValCollector = new MaxValueResultSetRowCollector(tableName, statePropertyMap, dbAdapter); + final MaxValueResultSetRowCollector maxValCollector = new MaxValueResultSetRowCollector(tableName, statePropertyMap); while (true) { final AtomicLong nrOfRows = new AtomicLong(0L); @@ -530,31 +527,39 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory } } - protected String getQuery(DatabaseAdapter dbAdapter, String tableName, String columnNames, List maxValColumnNames, - String customWhereClause, Map stateMap) { - - return getQuery(dbAdapter, tableName, null, columnNames, maxValColumnNames, customWhereClause, stateMap); - } - - protected String getQuery(DatabaseAdapter dbAdapter, String tableName, String sqlQuery, String columnNames, List maxValColumnNames, - String customWhereClause, Map stateMap) { + private String getQuery( + final DatabaseDialectService databaseDialectService, + final String databaseType, + final String tableName, + final String sqlQuery, + final List columnNames, + final List maxValColumnNames, + final String customWhereClause, + final Map stateMap + ) { if (StringUtils.isEmpty(tableName)) { throw new IllegalArgumentException("Table name must be specified"); } - final StringBuilder query; - if (StringUtils.isEmpty(sqlQuery)) { - query = new StringBuilder(dbAdapter.getSelectStatement(tableName, columnNames, null, null, null, null)); - } else { - query = getWrappedQuery(dbAdapter, sqlQuery, tableName); - } + final Optional derivedTableQuery = Optional.ofNullable(sqlQuery); + + final List columnDefinitions = columnNames.stream() + .map(StandardColumnDefinition::new) + .map(ColumnDefinition.class::cast) + .toList(); + final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, columnDefinitions); + final QueryStatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition, derivedTableQuery, List.of(), Optional.empty()); + final StatementResponse statementResponse = databaseDialectService.getStatement(statementRequest); + + final StringBuilder query = new StringBuilder(); + query.append(statementResponse.sql()); List whereClauses = new ArrayList<>(); // Check state map for last max values if (stateMap != null && !stateMap.isEmpty() && maxValColumnNames != null) { IntStream.range(0, maxValColumnNames.size()).forEach((index) -> { String colName = maxValColumnNames.get(index); - String maxValueKey = getStateKey(tableName, colName, dbAdapter); + String maxValueKey = getStateKey(tableName, colName); String maxValue = stateMap.get(maxValueKey); if (StringUtils.isEmpty(maxValue)) { // If we can't find the value at the fully-qualified key name, it is possible (under a previous scheme) @@ -569,7 +574,7 @@ protected String getQuery(DatabaseAdapter dbAdapter, String tableName, String sq throw new IllegalArgumentException("No column type found for: " + colName); } // Add a condition for the WHERE clause - whereClauses.add(colName + (index == 0 ? 
" > " : " >= ") + getLiteralByType(type, maxValue, dbAdapter.getName())); + whereClauses.add(colName + (index == 0 ? " > " : " >= ") + getLiteralByType(type, maxValue, databaseType)); } }); } @@ -587,13 +592,11 @@ protected String getQuery(DatabaseAdapter dbAdapter, String tableName, String sq } public class MaxValueResultSetRowCollector implements JdbcCommon.ResultSetRowCallback { - DatabaseAdapter dbAdapter; final Map newColMap; final Map originalState; String tableName; - public MaxValueResultSetRowCollector(String tableName, Map stateMap, DatabaseAdapter dbAdapter) { - this.dbAdapter = dbAdapter; + public MaxValueResultSetRowCollector(String tableName, Map stateMap) { this.originalState = stateMap; this.newColMap = new HashMap<>(); @@ -614,7 +617,7 @@ public void processRow(ResultSet resultSet) throws IOException { if (nrOfColumns > 0) { for (int i = 1; i <= nrOfColumns; i++) { String colName = meta.getColumnName(i).toLowerCase(); - String fullyQualifiedMaxValueKey = getStateKey(tableName, colName, dbAdapter); + String fullyQualifiedMaxValueKey = getStateKey(tableName, colName); Integer type = columnTypeMap.get(fullyQualifiedMaxValueKey); // Skip any columns we're not keeping track of or whose value is null if (type == null || resultSet.getObject(i) == null) { @@ -627,7 +630,7 @@ public void processRow(ResultSet resultSet) throws IOException { if (StringUtils.isEmpty(maxValueString)) { maxValueString = newColMap.get(colName); } - String newMaxValueString = getMaxValueFromRow(resultSet, i, type, maxValueString, dbAdapter.getName()); + String newMaxValueString = getMaxValueFromRow(resultSet, i, type, maxValueString); if (newMaxValueString != null) { newColMap.put(fullyQualifiedMaxValueKey, newMaxValueString); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java index 56cae799af02..c38b70691938 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java @@ -35,6 +35,18 @@ import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.components.state.StateMap; +import org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.database.dialect.service.api.PageRequest; +import org.apache.nifi.database.dialect.service.api.QueryClause; +import org.apache.nifi.database.dialect.service.api.QueryClauseType; +import org.apache.nifi.database.dialect.service.api.QueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StandardPageRequest; +import org.apache.nifi.database.dialect.service.api.StandardQueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.expression.AttributeExpression; import 
org.apache.nifi.expression.ExpressionLanguageScope; @@ -46,7 +58,6 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; import java.io.IOException; import java.sql.Connection; @@ -62,12 +73,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.OptionalLong; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.IntStream; - @TriggerSerially @InputRequirement(Requirement.INPUT_ALLOWED) @Tags({"sql", "select", "jdbc", "query", "database", "fetch", "generate"}) @@ -167,6 +179,7 @@ public class GenerateTableFetch extends AbstractDatabaseFetchProcessor { private static final List PROPERTIES = List.of( DBCP_SERVICE, DB_TYPE, + DATABASE_DIALECT_SERVICE, TABLE_NAME, COLUMN_NAMES, MAX_VALUE_COLUMN_NAMES, @@ -266,7 +279,9 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory final ComponentLog logger = getLogger(); final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class); - final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue()); + final DatabaseDialectService databaseDialectService = getDatabaseDialectService(context); + final String databaseType = context.getProperty(DB_TYPE).getValue(); + final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(fileToProcess).getValue(); final String columnNames = context.getProperty(COLUMN_NAMES).evaluateAttributeExpressions(fileToProcess).getValue(); final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES).evaluateAttributeExpressions(fileToProcess).getValue(); @@ -297,7 +312,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory // If an initial max value for column(s) has been specified using properties, and this column is not in the state manager, sync them to the state property map for (final Map.Entry maxProp : maxValueProperties.entrySet()) { String maxPropKey = maxProp.getKey().toLowerCase(); - String fullyQualifiedMaxPropKey = getStateKey(tableName, maxPropKey, dbAdapter); + String fullyQualifiedMaxPropKey = getStateKey(tableName, maxPropKey); if (!statePropertyMap.containsKey(fullyQualifiedMaxPropKey)) { String newMaxPropValue; // If we can't find the value at the fully-qualified key name, it is possible (under a previous scheme) @@ -316,7 +331,6 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory // executed SQL query will retrieve the count of all records after the filter(s) have been applied, as well as the new maximum values for the // specified columns. This allows the processor to generate the correctly partitioned SQL statements as well as to update the state with the // latest observed maximum values. - String whereClause = null; List maxValueColumnNameList = StringUtils.isEmpty(maxValueColumnNames) ? 
new ArrayList<>(0) : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*")); @@ -326,7 +340,6 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory Long maxValueForPartitioning = null; Long minValueForPartitioning = null; - String columnsClause = null; List maxValueSelectColumns = new ArrayList<>(numMaxValueColumns + 1); // replace unnecessary row count with -1 stub value when column values for paging is used, or when partition size is zero. @@ -341,16 +354,16 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory String colName = maxValueColumnNameList.get(index); maxValueSelectColumns.add("MAX(" + colName + ") " + colName); - String maxValue = getColumnStateMaxValue(tableName, statePropertyMap, colName, dbAdapter); + String maxValue = getColumnStateMaxValue(tableName, statePropertyMap, colName); if (!StringUtils.isEmpty(maxValue)) { - if (columnTypeMap.isEmpty() || getColumnType(tableName, colName, dbAdapter) == null) { + if (columnTypeMap.isEmpty() || getColumnType(tableName, colName) == null) { // This means column type cache is clean after instance reboot. We should re-cache column type super.setup(context, false, finalFileToProcess); } - Integer type = getColumnType(tableName, colName, dbAdapter); + Integer type = getColumnType(tableName, colName); // Add a condition for the WHERE clause - maxValueClauses.add(colName + (index == 0 ? " > " : " >= ") + getLiteralByType(type, maxValue, dbAdapter.getName())); + maxValueClauses.add(colName + (index == 0 ? " > " : " >= ") + getLiteralByType(type, maxValue, databaseType)); } }); @@ -369,17 +382,18 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory maxValueClauses.add("(" + customWhereClause + ")"); } - whereClause = StringUtils.join(maxValueClauses, " AND "); - columnsClause = StringUtils.join(maxValueSelectColumns, ", "); + final String maxWhereClause = StringUtils.join(maxValueClauses, " AND "); + final QueryStatementRequest queryStatementRequest = getMaxColumnStatementRequest(tableName, maxValueSelectColumns, maxWhereClause); // Build a SELECT query with maximum-value columns (if present) - final String selectQuery = dbAdapter.getSelectStatement(tableName, columnsClause, whereClause, null, null, null); - long rowCount = 0; + final StatementResponse statementResponse = databaseDialectService.getStatement(queryStatementRequest); + final String selectQuery = statementResponse.sql(); + long rowCount; try (final Connection con = dbcpService.getConnection(finalFileToProcess == null ? Collections.emptyMap() : finalFileToProcess.getAttributes()); final Statement st = con.createStatement()) { - final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions(fileToProcess).asTimePeriod(TimeUnit.SECONDS).intValue(); + final int queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions(fileToProcess).asTimePeriod(TimeUnit.SECONDS).intValue(); st.setQueryTimeout(queryTimeout); // timeout in seconds logger.debug("Executing {}", selectQuery); @@ -399,7 +413,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory // Since this column has been aliased lets check the label first, // if there is no label we'll use the column name. String resultColumnName = (StringUtils.isNotEmpty(rsmd.getColumnLabel(i)) ? 
rsmd.getColumnLabel(i) : rsmd.getColumnName(i)).toLowerCase(); - String fullyQualifiedStateKey = getStateKey(tableName, resultColumnName, dbAdapter); + String fullyQualifiedStateKey = getStateKey(tableName, resultColumnName); String resultColumnCurrentMax = statePropertyMap.get(fullyQualifiedStateKey); if (StringUtils.isEmpty(resultColumnCurrentMax) && !isDynamicTableName) { // If we can't find the value at the fully-qualified key name and the table name is static, it is possible (under a previous scheme) @@ -414,7 +428,7 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory columnTypeMap.put(fullyQualifiedStateKey, type); } try { - String newMaxValue = getMaxValueFromRow(resultSet, i, type, resultColumnCurrentMax, dbAdapter.getName()); + String newMaxValue = getMaxValueFromRow(resultSet, i, type, resultColumnCurrentMax); if (newMaxValue != null) { statePropertyMap.put(fullyQualifiedStateKey, newMaxValue); } @@ -440,16 +454,16 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory IntStream.range(0, numMaxValueColumns).forEach((index) -> { String colName = maxValueColumnNameList.get(index); - String maxValue = getColumnStateMaxValue(tableName, statePropertyMap, colName, dbAdapter); + String maxValue = getColumnStateMaxValue(tableName, statePropertyMap, colName); if (!StringUtils.isEmpty(maxValue)) { - if (columnTypeMap.isEmpty() || getColumnType(tableName, colName, dbAdapter) == null) { + if (columnTypeMap.isEmpty() || getColumnType(tableName, colName) == null) { // This means column type cache is clean after instance reboot. We should re-cache column type super.setup(context, false, finalFileToProcess); } - Integer type = getColumnType(tableName, colName, dbAdapter); + Integer type = getColumnType(tableName, colName); // Add a condition for the WHERE clause - maxValueClauses.add(colName + " <= " + getLiteralByType(type, maxValue, dbAdapter.getName())); + maxValueClauses.add(colName + " <= " + getLiteralByType(type, maxValue, databaseType)); } }); @@ -484,8 +498,8 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory FlowFile emptyFlowFile = (fileToProcess == null) ? session.create() : session.create(fileToProcess); Map attributesToAdd = new HashMap<>(); - whereClause = maxValueClauses.isEmpty() ? "1=1" : StringUtils.join(maxValueClauses, " AND "); - attributesToAdd.put("generatetablefetch.whereClause", whereClause); + final String fetchWhereClause = maxValueClauses.isEmpty() ? "1=1" : StringUtils.join(maxValueClauses, " AND "); + attributesToAdd.put("generatetablefetch.whereClause", fetchWhereClause); attributesToAdd.put("generatetablefetch.limit", null); if (partitionSize != 0) { @@ -507,12 +521,22 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory } //Update WHERE list to include new right hand boundaries - whereClause = maxValueClauses.isEmpty() ? "1=1" : StringUtils.join(maxValueClauses, " AND "); + final String whereClause = maxValueClauses.isEmpty() ? "1=1" : StringUtils.join(maxValueClauses, " AND "); Long offset = partitionSize == 0 ? null : i * partitionSize + (useColumnValsForPaging ? minValueForPartitioning : 0); // Don't use an ORDER BY clause if there's only one partition final String orderByClause = partitionSize == 0 ? null : (maxColumnNames.isEmpty() ? 
customOrderByColumn : maxColumnNames); - final String query = dbAdapter.getSelectStatement(tableName, columnNames, whereClause, orderByClause, limit, offset, columnForPartitioning); + final List namedColumns; + if (columnNames == null) { + namedColumns = List.of(); + } else { + namedColumns = Arrays.asList(columnNames.split(", ")); + } + + final QueryStatementRequest selectStatementRequest = getSelectStatementRequest(tableName, namedColumns, whereClause, orderByClause, offset, limit, columnForPartitioning); + final StatementResponse selectStatementResponse = databaseDialectService.getStatement(selectStatementRequest); + final String query = selectStatementResponse.sql(); + FlowFile sqlFlowFile = (fileToProcess == null) ? session.create() : session.create(fileToProcess); sqlFlowFile = session.write(sqlFlowFile, out -> out.write(query.getBytes())); Map attributesToAdd = new HashMap<>(); @@ -567,23 +591,78 @@ public void onTrigger(final ProcessContext context, final ProcessSessionFactory } } - private String getColumnStateMaxValue(String tableName, Map statePropertyMap, String colName, DatabaseAdapter adapter) { - final String fullyQualifiedStateKey = getStateKey(tableName, colName, adapter); + private QueryStatementRequest getMaxColumnStatementRequest(final String tableName, final List maxValueSelectColumns, final String whereClause) { + final List maxValueColumns = maxValueSelectColumns.stream() + .map(StandardColumnDefinition::new) + .map(ColumnDefinition.class::cast) + .toList(); + final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, maxValueColumns); + return new StandardQueryStatementRequest( + StatementType.SELECT, + tableDefinition, + Optional.empty(), + List.of(new QueryClause(QueryClauseType.WHERE, whereClause)), + Optional.empty() + ); + } + + private QueryStatementRequest getSelectStatementRequest( + final String tableName, + final List namedColumns, + final String whereClause, + final String orderByClause, + final Long offset, + final Long limit, + final String indexColumnName + ) { + final List maxValueColumns = (namedColumns).stream() + .map(StandardColumnDefinition::new) + .map(ColumnDefinition.class::cast) + .toList(); + final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, maxValueColumns); + + final List queryClauses = new ArrayList<>(); + if (orderByClause != null) { + final QueryClause queryClause = new QueryClause(QueryClauseType.ORDER_BY, orderByClause); + queryClauses.add(queryClause); + } + final QueryClause whereQueryClause = new QueryClause(QueryClauseType.WHERE, whereClause); + queryClauses.add(whereQueryClause); + + final PageRequest pageRequest; + if (offset == null) { + pageRequest = null; + } else { + final OptionalLong pageLimit = limit == null ? 
OptionalLong.empty() : OptionalLong.of(limit); + pageRequest = new StandardPageRequest(offset, pageLimit, Optional.ofNullable(indexColumnName)); + } + + return new StandardQueryStatementRequest( + StatementType.SELECT, + tableDefinition, + Optional.empty(), + queryClauses, + Optional.ofNullable(pageRequest) + ); + } + + private String getColumnStateMaxValue(String tableName, Map statePropertyMap, String colName) { + final String fullyQualifiedStateKey = getStateKey(tableName, colName); String maxValue = statePropertyMap.get(fullyQualifiedStateKey); if (StringUtils.isEmpty(maxValue) && !isDynamicTableName) { // If the table name is static and the fully-qualified key was not found, try just the column name - maxValue = statePropertyMap.get(getStateKey(null, colName, adapter)); + maxValue = statePropertyMap.get(getStateKey(null, colName)); } return maxValue; } - private Integer getColumnType(String tableName, String colName, DatabaseAdapter adapter) { - final String fullyQualifiedStateKey = getStateKey(tableName, colName, adapter); + private Integer getColumnType(String tableName, String colName) { + final String fullyQualifiedStateKey = getStateKey(tableName, colName); Integer type = columnTypeMap.get(fullyQualifiedStateKey); if (type == null && !isDynamicTableName) { // If the table name is static and the fully-qualified key was not found, try just the column name - type = columnTypeMap.get(getStateKey(null, colName, adapter)); + type = columnTypeMap.get(getStateKey(null, colName)); } return type; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java index 19192e57df86..fe2da9154315 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java @@ -49,7 +49,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.ServiceLoader; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -71,6 +70,15 @@ import org.apache.nifi.components.PropertyDescriptor.Builder; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; +import org.apache.nifi.context.PropertyContext; +import org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.database.dialect.service.api.StandardStatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.logging.ComponentLog; @@ -82,11 +90,13 @@ import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processor.util.pattern.RollbackOnFailure; import org.apache.nifi.processors.standard.db.ColumnDescription; 
-import org.apache.nifi.processors.standard.db.DatabaseAdapter; +import org.apache.nifi.processors.standard.db.DatabaseAdapterDescriptor; import org.apache.nifi.processors.standard.db.NameNormalizer; import org.apache.nifi.processors.standard.db.NameNormalizerFactory; import org.apache.nifi.processors.standard.db.TableSchema; import org.apache.nifi.processors.standard.db.TranslationStrategy; +import org.apache.nifi.processors.standard.db.impl.DatabaseAdapterDatabaseDialectService; +import org.apache.nifi.processors.standard.db.impl.DatabaseDialectServiceDatabaseAdapter; import org.apache.nifi.record.path.FieldValue; import org.apache.nifi.record.path.RecordPath; import org.apache.nifi.record.path.RecordPathResult; @@ -425,63 +435,42 @@ public class PutDatabaseRecord extends AbstractProcessor { .required(false) .build(); - static final PropertyDescriptor DB_TYPE; + static final PropertyDescriptor DB_TYPE = DatabaseAdapterDescriptor.getDatabaseTypeDescriptor("db-type"); + static final PropertyDescriptor DATABASE_DIALECT_SERVICE = DatabaseAdapterDescriptor.getDatabaseDialectServiceDescriptor(DB_TYPE); + + protected static final List properties = List.of( + RECORD_READER_FACTORY, + DB_TYPE, + DATABASE_DIALECT_SERVICE, + STATEMENT_TYPE, + STATEMENT_TYPE_RECORD_PATH, + DATA_RECORD_PATH, + DBCP_SERVICE, + CATALOG_NAME, + SCHEMA_NAME, + TABLE_NAME, + BINARY_STRING_FORMAT, + TRANSLATE_FIELD_NAMES, + TRANSLATION_STRATEGY, + TRANSLATION_PATTERN, + UNMATCHED_FIELD_BEHAVIOR, + UNMATCHED_COLUMN_BEHAVIOR, + UPDATE_KEYS, + DELETE_KEYS, + FIELD_CONTAINING_SQL, + ALLOW_MULTIPLE_STATEMENTS, + QUOTE_IDENTIFIERS, + QUOTE_TABLE_IDENTIFIER, + QUERY_TIMEOUT, + RollbackOnFailure.ROLLBACK_ON_FAILURE, + TABLE_SCHEMA_CACHE_SIZE, + MAX_BATCH_SIZE, + AUTO_COMMIT + ); - protected static final Map dbAdapters; - protected static List properties; private Cache schemaCache; - static { - dbAdapters = new HashMap<>(); - ArrayList dbAdapterValues = new ArrayList<>(); - - ServiceLoader dbAdapterLoader = ServiceLoader.load(DatabaseAdapter.class); - dbAdapterLoader.forEach(databaseAdapter -> { - dbAdapters.put(databaseAdapter.getName(), databaseAdapter); - dbAdapterValues.add(new AllowableValue(databaseAdapter.getName(), databaseAdapter.getName(), databaseAdapter.getDescription())); - }); - - DB_TYPE = new Builder() - .name("db-type") - .displayName("Database Type") - .description("The type/flavor of database, used for generating database-specific code. In many cases the Generic type " - + "should suffice, but some databases (such as Oracle) require custom SQL clauses. 
") - .allowableValues(dbAdapterValues.toArray(new AllowableValue[0])) - .defaultValue("Generic") - .required(false) - .build(); - - properties = List.of( - RECORD_READER_FACTORY, - DB_TYPE, - STATEMENT_TYPE, - STATEMENT_TYPE_RECORD_PATH, - DATA_RECORD_PATH, - DBCP_SERVICE, - CATALOG_NAME, - SCHEMA_NAME, - TABLE_NAME, - BINARY_STRING_FORMAT, - TRANSLATE_FIELD_NAMES, - TRANSLATION_STRATEGY, - TRANSLATION_PATTERN, - UNMATCHED_FIELD_BEHAVIOR, - UNMATCHED_COLUMN_BEHAVIOR, - UPDATE_KEYS, - DELETE_KEYS, - FIELD_CONTAINING_SQL, - ALLOW_MULTIPLE_STATEMENTS, - QUOTE_IDENTIFIERS, - QUOTE_TABLE_IDENTIFIER, - QUERY_TIMEOUT, - RollbackOnFailure.ROLLBACK_ON_FAILURE, - TABLE_SCHEMA_CACHE_SIZE, - MAX_BATCH_SIZE, - AUTO_COMMIT - ); - } - - private DatabaseAdapter databaseAdapter; + private volatile DatabaseDialectService databaseDialectService; private volatile Function recordPathOperationType; private volatile RecordPath dataRecordPath; @@ -497,18 +486,30 @@ protected List getSupportedPropertyDescriptors() { @Override protected Collection customValidate(ValidationContext validationContext) { - Collection validationResults = new ArrayList<>(super.customValidate(validationContext)); - - DatabaseAdapter databaseAdapter = dbAdapters.get(validationContext.getProperty(DB_TYPE).getValue()); - String statementType = validationContext.getProperty(STATEMENT_TYPE).getValue(); - if ((UPSERT_TYPE.equals(statementType) && !databaseAdapter.supportsUpsert()) - || (INSERT_IGNORE_TYPE.equals(statementType) && !databaseAdapter.supportsInsertIgnore())) { - validationResults.add(new ValidationResult.Builder() - .subject(STATEMENT_TYPE.getDisplayName()) - .valid(false) - .explanation(databaseAdapter.getName() + " does not support " + statementType) - .build() - ); + final Collection validationResults = new ArrayList<>(super.customValidate(validationContext)); + + final DatabaseDialectService dialectService = getDatabaseDialectService(validationContext); + final Set supportedStatementTypes = dialectService.getSupportedStatementTypes(); + final String configuredStatementType = validationContext.getProperty(STATEMENT_TYPE).getValue(); + if (INSERT_IGNORE_TYPE.equals(configuredStatementType)) { + if (!supportedStatementTypes.contains(StatementType.INSERT_IGNORE)) { + validationResults.add(new ValidationResult.Builder() + .subject(STATEMENT_TYPE.getDisplayName()) + .valid(false) + .explanation("INSERT IGNORE not supported with Database Dialect") + .build() + ); + } + } + if (UPSERT_TYPE.equals(configuredStatementType)) { + if (!supportedStatementTypes.contains(StatementType.UPSERT)) { + validationResults.add(new ValidationResult.Builder() + .subject(STATEMENT_TYPE.getDisplayName()) + .valid(false) + .explanation("UPSERT not supported with Database Dialect") + .build() + ); + } } final Boolean autoCommit = validationContext.getProperty(AUTO_COMMIT).asBoolean(); @@ -549,7 +550,7 @@ private boolean isMaxBatchSizeHardcodedToZero(ValidationContext validationContex @OnScheduled public void onScheduled(final ProcessContext context) { - databaseAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue()); + databaseDialectService = getDatabaseDialectService(context); final int tableSchemaCacheSize = context.getProperty(TABLE_SCHEMA_CACHE_SIZE).asInteger(); schemaCache = Caffeine.newBuilder() @@ -608,6 +609,19 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } } + private DatabaseDialectService getDatabaseDialectService(final PropertyContext context) { + final String databaseType = 
context.getProperty(DB_TYPE).getValue(); + + final DatabaseDialectService service; + if (DatabaseDialectServiceDatabaseAdapter.NAME.equals(databaseType)) { + service = context.getProperty(DATABASE_DIALECT_SERVICE).asControllerService(DatabaseDialectService.class); + } else { + service = new DatabaseAdapterDatabaseDialectService(databaseType); + } + + return service; + } + private void routeOnException(final ProcessContext context, final ProcessSession session, Connection connection, Exception e, FlowFile flowFile) { // When an Exception is thrown, we want to route to 'retry' if we expect that attempting the same request again @@ -843,9 +857,9 @@ private void executeDML(final ProcessContext context, final ProcessSession sessi } else if (DELETE_TYPE.equalsIgnoreCase(statementType)) { sqlHolder = generateDelete(recordSchema, fqTableName, deleteKeys, tableSchema, settings, normalizer); } else if (UPSERT_TYPE.equalsIgnoreCase(statementType)) { - sqlHolder = generateUpsert(recordSchema, fqTableName, updateKeys, tableSchema, settings, normalizer); + sqlHolder = getSqlStatement(StatementType.UPSERT, recordSchema, fqTableName, updateKeys, tableSchema, settings, normalizer); } else if (INSERT_IGNORE_TYPE.equalsIgnoreCase(statementType)) { - sqlHolder = generateInsertIgnore(recordSchema, fqTableName, updateKeys, tableSchema, settings, normalizer); + sqlHolder = getSqlStatement(StatementType.INSERT_IGNORE, recordSchema, fqTableName, updateKeys, tableSchema, settings, normalizer); } else { throw new IllegalArgumentException(format("Statement Type %s is not valid, FlowFile %s", statementType, flowFile)); } @@ -864,7 +878,8 @@ private void executeDML(final ProcessContext context, final ProcessSession sessi } } - preparedSqlAndColumns = new PreparedSqlAndColumns(sqlHolder, preparedStatement); + final int parameterCount = getParameterCount(sqlHolder.sql); + preparedSqlAndColumns = new PreparedSqlAndColumns(sqlHolder, preparedStatement, parameterCount); preparedSql.put(statementType, preparedSqlAndColumns); } @@ -970,7 +985,8 @@ private void executeDML(final ProcessContext context, final ProcessSession sessi setParameter(ps, ++deleteIndex, currentValue, fieldSqlType, sqlType); } } else if (UPSERT_TYPE.equalsIgnoreCase(statementType)) { - final int timesToAddObjects = databaseAdapter.getTimesToAddColumnObjectsForUpsert(); + // Calculate the number of times to set the parameter based on fields divided by parameters + final int timesToAddObjects = fieldIndexes.size() / preparedSqlAndColumns.parameterCount; for (int j = 0; j < timesToAddObjects; j++) { setParameter(ps, i + (fieldIndexes.size() * j) + 1, currentValue, fieldSqlType, sqlType); } @@ -1285,115 +1301,80 @@ SqlAndIncludedColumns generateInsert(final RecordSchema recordSchema, final Stri return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns); } - SqlAndIncludedColumns generateUpsert(final RecordSchema recordSchema, final String tableName, final String updateKeys, - final TableSchema tableSchema, final DMLSettings settings, NameNormalizer normalizer) - throws IllegalArgumentException, SQLException, MalformedRecordException { - + private SqlAndIncludedColumns getSqlStatement( + final StatementType statementType, + final RecordSchema recordSchema, + final String qualifiedTableName, + final String updateKeys, + final TableSchema tableSchema, + final DMLSettings settings, + final NameNormalizer normalizer + ) throws MalformedRecordException, SQLDataException, SQLIntegrityConstraintViolationException { 
checkValuesForRequiredColumns(recordSchema, tableSchema, settings, normalizer); - Set keyColumnNames = getUpdateKeyColumnNames(tableName, updateKeys, tableSchema); + final Set keyColumnNames = getUpdateKeyColumnNames(qualifiedTableName, updateKeys, tableSchema); normalizeKeyColumnNamesAndCheckForValues(recordSchema, updateKeys, settings, keyColumnNames, normalizer); - List usedColumnNames = new ArrayList<>(); - List usedColumnIndices = new ArrayList<>(); - - List fieldNames = recordSchema.getFieldNames(); - if (fieldNames != null) { - int fieldCount = fieldNames.size(); - - for (int i = 0; i < fieldCount; i++) { - RecordField field = recordSchema.getField(i); - String fieldName = field.getFieldName(); - - final ColumnDescription desc = tableSchema.getColumns().get(TableSchema.normalizedName(fieldName, settings.translateFieldNames, normalizer)); - if (desc == null && !settings.ignoreUnmappedFields) { - throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database\n" - + (settings.translateFieldNames ? "Normalized " : "") + "Columns: " + String.join(",", tableSchema.getColumns().keySet())); - } - - if (desc != null) { - if (settings.escapeColumnNames) { - usedColumnNames.add(tableSchema.getQuotedIdentifierString() + desc.getColumnName() + tableSchema.getQuotedIdentifierString()); - } else { - usedColumnNames.add(desc.getColumnName()); - } - usedColumnIndices.add(i); + final List columnDefinitions = new ArrayList<>(); + final List usedColumnIndices = new ArrayList<>(); + final List fieldNames = recordSchema.getFieldNames(); + final int fieldCount = fieldNames.size(); + for (int i = 0; i < fieldCount; i++) { + final RecordField field = recordSchema.getField(i); + final String fieldName = field.getFieldName(); + + final String columnNameNormalized = TableSchema.normalizedName(fieldName, settings.translateFieldNames, normalizer); + final ColumnDescription columnDescription = tableSchema.getColumns().get(columnNameNormalized); + if (columnDescription == null) { + if (settings.ignoreUnmappedFields) { + continue; } else { - // User is ignoring unmapped fields, but log at debug level just in case - getLogger().debug("Did not map field '{}' to any column in the database\n{}Columns: {}", - fieldName, (settings.translateFieldNames ? 
"Normalized " : ""), String.join(",", tableSchema.getColumns().keySet())); + final String tableColumnNames = String.join(",", tableSchema.getColumns().keySet()); + final String message = "Record Field [%s] not mapped to Table Columns [%s]".formatted(fieldName, tableColumnNames); + throw new SQLDataException(message); } } - } - final Set literalKeyColumnNames = new HashSet<>(keyColumnNames.size()); - for (String literalKeyColumnName : keyColumnNames) { + final String columnName; if (settings.escapeColumnNames) { - literalKeyColumnNames.add(tableSchema.getQuotedIdentifierString() + literalKeyColumnName + tableSchema.getQuotedIdentifierString()); + final String quotedIdentifier = tableSchema.getQuotedIdentifierString(); + columnName = quotedIdentifier + columnDescription.getColumnName() + quotedIdentifier; } else { - literalKeyColumnNames.add(literalKeyColumnName); + columnName = columnDescription.getColumnName(); } - } - String sql = databaseAdapter.getUpsertStatement(tableName, usedColumnNames, literalKeyColumnNames); - return new SqlAndIncludedColumns(sql, usedColumnIndices); - } - - SqlAndIncludedColumns generateInsertIgnore(final RecordSchema recordSchema, final String tableName, final String updateKeys, - final TableSchema tableSchema, final DMLSettings settings, NameNormalizer normalizer) - throws IllegalArgumentException, SQLException, MalformedRecordException { - - checkValuesForRequiredColumns(recordSchema, tableSchema, settings, normalizer); - - Set keyColumnNames = getUpdateKeyColumnNames(tableName, updateKeys, tableSchema); - normalizeKeyColumnNamesAndCheckForValues(recordSchema, updateKeys, settings, keyColumnNames, normalizer); - - List usedColumnNames = new ArrayList<>(); - List usedColumnIndices = new ArrayList<>(); - - List fieldNames = recordSchema.getFieldNames(); - if (fieldNames != null) { - int fieldCount = fieldNames.size(); - - for (int i = 0; i < fieldCount; i++) { - RecordField field = recordSchema.getField(i); - String fieldName = field.getFieldName(); - - final ColumnDescription desc = tableSchema.getColumns().get(TableSchema.normalizedName(fieldName, settings.translateFieldNames, normalizer)); - if (desc == null && !settings.ignoreUnmappedFields) { - throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database\n" - + (settings.translateFieldNames ? "Normalized " : "") + "Columns: " + String.join(",", tableSchema.getColumns().keySet())); - } - - if (desc != null) { - if (settings.escapeColumnNames) { - usedColumnNames.add(tableSchema.getQuotedIdentifierString() + desc.getColumnName() + tableSchema.getQuotedIdentifierString()); - } else { - usedColumnNames.add(desc.getColumnName()); - } - usedColumnIndices.add(i); - } else { - // User is ignoring unmapped fields, but log at debug level just in case - getLogger().debug("Did not map field '{}' to any column in the database\n{}Columns: {}", - fieldName, (settings.translateFieldNames ? 
"Normalized " : ""), String.join(",", tableSchema.getColumns().keySet())); - } - } - } + final ColumnDefinition columnDefinition = getColumnDefinition(columnDescription, keyColumnNames, columnName); + columnDefinitions.add(columnDefinition); - final Set literalKeyColumnNames = new HashSet<>(keyColumnNames.size()); - for (String literalKeyColumnName : keyColumnNames) { - if (settings.escapeColumnNames) { - literalKeyColumnNames.add(tableSchema.getQuotedIdentifierString() + literalKeyColumnName + tableSchema.getQuotedIdentifierString()); - } else { - literalKeyColumnNames.add(literalKeyColumnName); - } + usedColumnIndices.add(i); } - String sql = databaseAdapter.getInsertIgnoreStatement(tableName, usedColumnNames, literalKeyColumnNames); + final TableDefinition tableDefinition = new TableDefinition( + Optional.empty(), + Optional.empty(), + qualifiedTableName, + columnDefinitions + ); + final StatementRequest statementRequest = new StandardStatementRequest(statementType, tableDefinition); + final StatementResponse statementResponse = databaseDialectService.getStatement(statementRequest); + final String sql = statementResponse.sql(); return new SqlAndIncludedColumns(sql, usedColumnIndices); } + private ColumnDefinition getColumnDefinition(final ColumnDescription columnDescription, final Set keyColumnNames, final String columnName) { + final int dataType = columnDescription.getDataType(); + final boolean primaryKey = keyColumnNames.contains(columnDescription.getColumnName()); + final StandardColumnDefinition.Nullable nullable = columnDescription.isNullable() ? StandardColumnDefinition.Nullable.YES : StandardColumnDefinition.Nullable.NO; + return new StandardColumnDefinition( + columnName, + dataType, + nullable, + Optional.empty(), + primaryKey + ); + } + SqlAndIncludedColumns generateUpdate(final RecordSchema recordSchema, final String tableName, final String updateKeys, final TableSchema tableSchema, final DMLSettings settings, NameNormalizer normalizer) throws IllegalArgumentException, MalformedRecordException, SQLException { @@ -1657,6 +1638,16 @@ private boolean isSupportsBatchUpdates(Connection connection) { } } + private int getParameterCount(final String sql) { + int parameterCount = 0; + for (char character : sql.toCharArray()) { + if ('?' 
== character) { + parameterCount++; + } + } + return parameterCount; + } + static class SchemaKey { private final String catalog; private final String schemaName; @@ -1721,10 +1712,12 @@ public List getFieldIndexes() { static class PreparedSqlAndColumns { private final SqlAndIncludedColumns sqlAndIncludedColumns; private final PreparedStatement preparedStatement; + private final int parameterCount; - public PreparedSqlAndColumns(final SqlAndIncludedColumns sqlAndIncludedColumns, final PreparedStatement preparedStatement) { + public PreparedSqlAndColumns(final SqlAndIncludedColumns sqlAndIncludedColumns, final PreparedStatement preparedStatement, final int parameterCount) { this.sqlAndIncludedColumns = sqlAndIncludedColumns; this.preparedStatement = preparedStatement; + this.parameterCount = parameterCount; } public SqlAndIncludedColumns getSqlAndIncludedColumns() { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java index fbed15c0ea0f..bf4732491502 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java @@ -95,6 +95,7 @@ public class QueryDatabaseTable extends AbstractQueryDatabaseTable { private static final List PROPERTIES = List.of( DBCP_SERVICE, DB_TYPE, + DATABASE_DIALECT_SERVICE, TABLE_NAME, COLUMN_NAMES, WHERE_CLAUSE, diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java index 7e40f702e7f3..f2327f6379ac 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java @@ -194,6 +194,7 @@ public class QueryDatabaseTableRecord extends AbstractQueryDatabaseTable { private static final List PROPERTIES = List.of( DBCP_SERVICE, DB_TYPE, + DATABASE_DIALECT_SERVICE, TABLE_NAME, COLUMN_NAMES, WHERE_CLAUSE, diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java index 00b9d1ba5224..75686ddd61fe 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java @@ -26,6 +26,15 @@ import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.Validator; +import org.apache.nifi.context.PropertyContext; +import 
org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.database.dialect.service.api.StandardStatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; @@ -39,12 +48,14 @@ import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processor.util.pattern.DiscontinuedException; import org.apache.nifi.processors.standard.db.ColumnDescription; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; +import org.apache.nifi.processors.standard.db.DatabaseAdapterDescriptor; import org.apache.nifi.processors.standard.db.NameNormalizer; import org.apache.nifi.processors.standard.db.NameNormalizerFactory; import org.apache.nifi.processors.standard.db.TableNotFoundException; import org.apache.nifi.processors.standard.db.TableSchema; import org.apache.nifi.processors.standard.db.TranslationStrategy; +import org.apache.nifi.processors.standard.db.impl.DatabaseAdapterDatabaseDialectService; +import org.apache.nifi.processors.standard.db.impl.DatabaseDialectServiceDatabaseAdapter; import org.apache.nifi.serialization.MalformedRecordException; import org.apache.nifi.serialization.RecordReader; import org.apache.nifi.serialization.RecordReaderFactory; @@ -72,7 +83,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.ServiceLoader; +import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; @@ -257,8 +268,8 @@ public class UpdateDatabaseTable extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); - protected static final Map dbAdapters; - static final PropertyDescriptor DB_TYPE; + static final PropertyDescriptor DB_TYPE = DatabaseAdapterDescriptor.getDatabaseTypeDescriptor("db-type"); + static final PropertyDescriptor DATABASE_DIALECT_SERVICE = DatabaseAdapterDescriptor.getDatabaseDialectServiceDescriptor(DB_TYPE); private static final List properties; // Relationships @@ -278,29 +289,11 @@ public class UpdateDatabaseTable extends AbstractProcessor { ); static { - dbAdapters = new HashMap<>(); - ArrayList dbAdapterValues = new ArrayList<>(); - - ServiceLoader dbAdapterLoader = ServiceLoader.load(DatabaseAdapter.class); - dbAdapterLoader.forEach(databaseAdapter -> { - dbAdapters.put(databaseAdapter.getName(), databaseAdapter); - dbAdapterValues.add(new AllowableValue(databaseAdapter.getName(), databaseAdapter.getName(), databaseAdapter.getDescription())); - }); - - DB_TYPE = new PropertyDescriptor.Builder() - .name("db-type") - .displayName("Database Type") - .description("The type/flavor of database, used for generating database-specific code. 
In many cases the Generic type " - + "should suffice, but some databases (such as Oracle) require custom SQL clauses.") - .allowableValues(dbAdapterValues.toArray(new AllowableValue[0])) - .defaultValue("Generic") - .required(false) - .build(); - properties = List.of( RECORD_READER, DBCP_SERVICE, DB_TYPE, + DATABASE_DIALECT_SERVICE, CATALOG_NAME, SCHEMA_NAME, TABLE_NAME, @@ -338,12 +331,6 @@ protected Collection customValidate(final ValidationContext va .explanation("Record Writer must be set if 'Update Field Names' is true").valid(false).build()); } - final DatabaseAdapter databaseAdapter = dbAdapters.get(validationContext.getProperty(DB_TYPE).getValue()); - final boolean createIfNotExists = CREATE_IF_NOT_EXISTS.getValue().equals(validationContext.getProperty(CREATE_TABLE).getValue()); - if (createIfNotExists && !databaseAdapter.supportsCreateTableIfNotExists()) { - validationResults.add(new ValidationResult.Builder().subject(CREATE_TABLE.getDisplayName()) - .explanation("The specified Database Type does not support Create If Not Exists").valid(false).build()); - } return validationResults; } @@ -409,7 +396,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session throw new ProcessException("Record Writer must be set if 'Update Field Names' is true"); } final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class); - final DatabaseAdapter databaseAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue()); + final DatabaseDialectService databaseDialectService = getDatabaseDialectService(context); try (final Connection connection = dbcpService.getConnection(flowFile.getAttributes())) { final boolean quoteTableName = context.getProperty(QUOTE_TABLE_IDENTIFIER).asBoolean(); final boolean quoteColumnNames = context.getProperty(QUOTE_COLUMN_IDENTIFIERS).asBoolean(); @@ -426,7 +413,7 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } else { primaryKeyColumnNames = null; } - final OutputMetadataHolder outputMetadataHolder = checkAndUpdateTableSchema(connection, databaseAdapter, recordSchema, + final OutputMetadataHolder outputMetadataHolder = checkAndUpdateTableSchema(connection, databaseDialectService, recordSchema, catalogName, schemaName, tableName, createIfNotExists, translateFieldNames, normalizer, updateFieldNames, primaryKeyColumnNames, quoteTableName, quoteColumnNames); if (outputMetadataHolder != null) { @@ -490,11 +477,21 @@ public void onTrigger(final ProcessContext context, final ProcessSession session } } - private synchronized OutputMetadataHolder checkAndUpdateTableSchema(final Connection conn, final DatabaseAdapter databaseAdapter, final RecordSchema schema, - final String catalogName, final String schemaName, final String tableName, - final boolean createIfNotExists, final boolean translateFieldNames, final NameNormalizer normalizer, - final boolean updateFieldNames, final Set primaryKeyColumnNames, final boolean quoteTableName, - final boolean quoteColumnNames) throws IOException { + private synchronized OutputMetadataHolder checkAndUpdateTableSchema( + final Connection conn, + final DatabaseDialectService databaseDialectService, + final RecordSchema schema, + final String catalogName, + final String schemaName, + final String tableName, + final boolean createIfNotExists, + final boolean translateFieldNames, + final NameNormalizer normalizer, + final boolean updateFieldNames, + final Set primaryKeyColumnNames, + final boolean quoteTableName, + final boolean 
quoteColumnNames + ) throws IOException { // Read in the current table metadata, compare it to the reader's schema, and // add any columns from the schema that are missing in the table try (final Statement s = conn.createStatement()) { @@ -510,18 +507,43 @@ private synchronized OutputMetadataHolder checkAndUpdateTableSchema(final Connec boolean tableCreated = false; if (tableSchema == null) { if (createIfNotExists) { + final DatabaseMetaData databaseMetaData = conn.getMetaData(); + final String quoteString = databaseMetaData.getIdentifierQuoteString(); + // Create a TableSchema from the record, adding all columns for (RecordField recordField : schema.getFields()) { String recordFieldName = recordField.getFieldName(); // Assume a column to be created is required if there is a default value in the schema final boolean required = (recordField.getDefaultValue() != null); - columns.add(new ColumnDescription(recordFieldName, DataTypeUtils.getSQLTypeValue(recordField.getDataType()), required, null, recordField.isNullable())); - getLogger().debug("Adding column {} to table {}", recordFieldName, tableName); + + final String columnName; + if (translateFieldNames) { + columnName = normalizer.getNormalizedName(recordFieldName); + } else { + columnName = recordFieldName; + } + + final String qualifiedColumnName; + if (quoteColumnNames) { + qualifiedColumnName = s.enquoteIdentifier(columnName, true); + } else { + qualifiedColumnName = columnName; + } + + final int dataType = DataTypeUtils.getSQLTypeValue(recordField.getDataType()); + columns.add(new ColumnDescription(qualifiedColumnName, dataType, required, null, recordField.isNullable())); + getLogger().debug("Adding column {} to table {}", columnName, tableName); } - tableSchema = new TableSchema(catalogName, schemaName, tableName, columns, translateFieldNames, normalizer, primaryKeyColumnNames, databaseAdapter.getColumnQuoteString()); + final String qualifiedCatalogName = catalogName == null ? null : s.enquoteIdentifier(catalogName, quoteTableName); + final String qualifiedSchemaName = schemaName == null ? 
null : s.enquoteIdentifier(schemaName, quoteTableName); + final String qualifiedTableName = s.enquoteIdentifier(tableName, quoteTableName); + tableSchema = new TableSchema(qualifiedCatalogName, qualifiedSchemaName, qualifiedTableName, columns, translateFieldNames, normalizer, primaryKeyColumnNames, quoteString); - final String createTableSql = databaseAdapter.getCreateTableStatement(tableSchema, quoteTableName, quoteColumnNames); + final TableDefinition tableDefinition = getTableDefinition(tableSchema); + final StatementRequest statementRequest = new StandardStatementRequest(StatementType.CREATE, tableDefinition); + final StatementResponse statementResponse = databaseDialectService.getStatement(statementRequest); + final String createTableSql = statementResponse.sql(); if (StringUtils.isNotEmpty(createTableSql)) { // Perform the table create @@ -550,7 +572,7 @@ private synchronized OutputMetadataHolder checkAndUpdateTableSchema(final Connec final String normalizedFieldName = TableSchema.normalizedName(recordFieldName, translateFieldNames, normalizer); if (!dbColumns.contains(normalizedFieldName)) { // The field does not exist in the table, add it - ColumnDescription columnToAdd = new ColumnDescription(recordFieldName, DataTypeUtils.getSQLTypeValue(recordField.getDataType()), + ColumnDescription columnToAdd = new ColumnDescription(normalizedFieldName, DataTypeUtils.getSQLTypeValue(recordField.getDataType()), recordField.getDefaultValue() != null, null, recordField.isNullable()); columnsToAdd.add(columnToAdd); dbColumns.add(recordFieldName); @@ -559,18 +581,24 @@ private synchronized OutputMetadataHolder checkAndUpdateTableSchema(final Connec } if (!columnsToAdd.isEmpty()) { - final List alterTableSqlStatements = databaseAdapter.getAlterTableStatements(tableName, columnsToAdd, quoteTableName, quoteColumnNames); - - if (alterTableSqlStatements != null && !alterTableSqlStatements.isEmpty()) { - for (String alterTableSql : alterTableSqlStatements) { - if (StringUtils.isEmpty(alterTableSql)) { - continue; - } - // Perform the table update - getLogger().info("Executing DDL: {}", alterTableSql); - s.execute(alterTableSql); - } - } + final List columnDefinitions = columnsToAdd.stream().map(columnDescription -> + new StandardColumnDefinition( + columnDescription.getColumnName(), + columnDescription.getDataType(), + columnDescription.isNullable() ? 
ColumnDefinition.Nullable.YES : ColumnDefinition.Nullable.UNKNOWN, + Optional.empty(), + columnDescription.isRequired() + ) + ) + .map(ColumnDefinition.class::cast) + .toList(); + final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, columnDefinitions); + final StatementRequest statementRequest = new StandardStatementRequest(StatementType.ALTER, tableDefinition); + final StatementResponse statementResponse = databaseDialectService.getStatement(statementRequest); + + // Perform the table update + getLogger().info("Executing DDL: {}", statementResponse.sql()); + s.execute(statementResponse.sql()); } } @@ -649,6 +677,42 @@ private String getJdbcUrl(final Connection connection) { return "DBCPService"; } + private DatabaseDialectService getDatabaseDialectService(final PropertyContext context) { + final DatabaseDialectService databaseDialectService; + final String databaseType = context.getProperty(DB_TYPE).getValue(); + if (DatabaseDialectServiceDatabaseAdapter.NAME.equals(databaseType)) { + databaseDialectService = context.getProperty(DATABASE_DIALECT_SERVICE).asControllerService(DatabaseDialectService.class); + } else { + databaseDialectService = new DatabaseAdapterDatabaseDialectService(databaseType); + } + return databaseDialectService; + } + + private TableDefinition getTableDefinition(final TableSchema tableSchema) { + final Set primaryKeyColumnNames = tableSchema.getPrimaryKeyColumnNames(); + final Set primaryKeys = primaryKeyColumnNames == null ? Set.of() : primaryKeyColumnNames; + + final List columnDefinitions = tableSchema.getColumnsAsList().stream() + .map(columnDescription -> + new StandardColumnDefinition( + columnDescription.getColumnName(), + columnDescription.getDataType(), + columnDescription.isNullable() ? ColumnDefinition.Nullable.YES : ColumnDefinition.Nullable.NO, + Optional.empty(), + primaryKeys.contains(columnDescription.getColumnName()) + ) + ) + .map(ColumnDefinition.class::cast) + .toList(); + + return new TableDefinition( + Optional.ofNullable(tableSchema.getCatalogName()), + Optional.ofNullable(tableSchema.getSchemaName()), + tableSchema.getTableName(), + columnDefinitions + ); + } + private static class OutputMetadataHolder { private final RecordSchema outputSchema; private final Map fieldMap; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java index 156cd1285a8b..9970f3ac991d 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java @@ -80,16 +80,6 @@ default boolean supportsInsertIgnore() { return false; } - /** - * Tells How many times the column values need to be inserted into the prepared statement. Some DBs (such as MySQL) need the values specified twice in the statement, - * some need only to specify them once. - * - * @return An integer corresponding to the number of times to insert column values into the prepared statement for UPSERT, or -1 if upsert is not supported. - */ - default int getTimesToAddColumnObjectsForUpsert() { - return supportsUpsert() ? 1 : -1; - } - /** * Returns an SQL UPSERT statement - i.e. 
UPDATE record or INSERT if id doesn't exist. *
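The hunk above removes getTimesToAddColumnObjectsForUpsert(), which adapters such as MySQL used to report that each column value must be bound twice when preparing an UPSERT. PutDatabaseRecord now derives that repeat factor from the prepared statement itself: the new getParameterCount() helper counts the '?' placeholders in the generated SQL, and the processor compares that count with the number of record fields bound per pass. A standalone sketch of the idea follows; the class, table, and variable names are illustrative and are not taken from this patch.

    import java.util.List;

    public class UpsertBindingSketch {

        // Mirrors the new getParameterCount() helper: count JDBC '?' placeholders in the generated SQL
        static int countPlaceholders(final String sql) {
            int count = 0;
            for (final char character : sql.toCharArray()) {
                if (character == '?') {
                    count++;
                }
            }
            return count;
        }

        public static void main(final String[] args) {
            final List<String> fields = List.of("id", "name");

            // A MySQL-style UPSERT repeats the column placeholders in the ON DUPLICATE KEY UPDATE clause,
            // so the placeholder count is twice the field count and each value must be bound twice
            final String upsert = "INSERT INTO users (id, name) VALUES (?, ?) ON DUPLICATE KEY UPDATE id = ?, name = ?";

            final int placeholders = countPlaceholders(upsert);            // 4
            final int timesToBindEachValue = placeholders / fields.size(); // 2

            System.out.println("Bind each record value " + timesToBindEachValue + " time(s)");
        }
    }

For a dialect whose UPSERT binds each value only once, for example a PostgreSQL ON CONFLICT ... DO UPDATE statement that references EXCLUDED columns, the same comparison yields a factor of 1, so no per-adapter hint is needed.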

@@ -120,24 +110,15 @@ default String getInsertIgnoreStatement(String table, List columnNames, throw new UnsupportedOperationException("UPSERT is not supported for " + getName()); } - /** - *

Returns a bare identifier string by removing wrapping escape characters
- * from identifier strings such as table and column names.
- *
- * The default implementation of this method removes double quotes.
- * If the target database engine supports different escape characters, then its DatabaseAdapter implementation should override
- * this method so that such escape characters can be removed properly.
- * - * @param identifier An identifier which may be wrapped with escape characters - * @return An unwrapped identifier string, or null if the input identifier is null - */ - default String unwrapIdentifier(String identifier) { - return identifier == null ? null : identifier.replaceAll("\"", ""); - } - default String getTableAliasClause(String tableName) { return "AS " + tableName; } + /** + * Table Quote String usage limited to statement generation methods within DatabaseAdapter + * + * @return Table Quote String + */ default String getTableQuoteString() { // ANSI standard is a double quote return "\""; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapterDescriptor.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapterDescriptor.java new file mode 100644 index 000000000000..d3d5bac52207 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapterDescriptor.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.processors.standard.db; + +import org.apache.nifi.components.AllowableValue; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.processors.standard.db.impl.DatabaseDialectServiceDatabaseAdapter; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.ServiceLoader; + +public class DatabaseAdapterDescriptor { + + private static final List databaseTypes = new ArrayList<>(); + + private static final Map databaseAdapters = new HashMap<>(); + + static { + final ServiceLoader loader = ServiceLoader.load(DatabaseAdapter.class); + loader.forEach(databaseAdapter -> { + final String name = databaseAdapter.getName(); + final String description = databaseAdapter.getDescription(); + final AllowableValue databaseType = new AllowableValue(name, name, description); + databaseTypes.add(databaseType); + databaseAdapters.put(name, databaseAdapter); + }); + } + + public static PropertyDescriptor getDatabaseDialectServiceDescriptor(final PropertyDescriptor dependsOnPropertyDescriptor) { + return new PropertyDescriptor.Builder() + .name("Database Dialect Service") + .description("Database Dialect Service for generating statements specific to a particular service or vendor.") + .identifiesControllerService(DatabaseDialectService.class) + .required(true) + .dependsOn(dependsOnPropertyDescriptor, DatabaseDialectServiceDatabaseAdapter.NAME) + .build(); + } + + public static PropertyDescriptor getDatabaseTypeDescriptor(final String propertyName) { + return new PropertyDescriptor.Builder() + .name(propertyName) + .displayName("Database Type") + .description(""" + Database Type for generating statements specific to a particular service or vendor. + The Generic Type supports most cases but selecting a specific type enables optimal processing + or additional features. + """ + ) + .allowableValues(databaseTypes.toArray(new AllowableValue[0])) + .defaultValue("Generic") + .required(true) + .build(); + } + + public static DatabaseAdapter getDatabaseAdapter(final String databaseType) { + Objects.requireNonNull(databaseType, "Database Type required"); + return databaseAdapters.get(databaseType); + } +} diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/DatabaseAdapterDatabaseDialectService.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/DatabaseAdapterDatabaseDialectService.java new file mode 100644 index 000000000000..df8e947b9a26 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/DatabaseAdapterDatabaseDialectService.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.processors.standard.db.impl; + +import org.apache.nifi.controller.AbstractControllerService; +import org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.database.dialect.service.api.PageRequest; +import org.apache.nifi.database.dialect.service.api.QueryClause; +import org.apache.nifi.database.dialect.service.api.QueryClauseType; +import org.apache.nifi.database.dialect.service.api.QueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StandardStatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; +import org.apache.nifi.processors.standard.db.ColumnDescription; +import org.apache.nifi.processors.standard.db.DatabaseAdapter; +import org.apache.nifi.processors.standard.db.DatabaseAdapterDescriptor; +import org.apache.nifi.processors.standard.db.TableSchema; + +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Transitional implementation of Database Dialect Service bridging to existing Database Adapters + */ +public class DatabaseAdapterDatabaseDialectService extends AbstractControllerService implements DatabaseDialectService { + private static final char SPACE_SEPARATOR = ' '; + + private static final char COMMA_SEPARATOR = ','; + + private static final int COLUMN_SIZE_IGNORED = -1; + + private static final String DOUBLE_QUOTE = "\""; + + private final DatabaseAdapter databaseAdapter; + + private final Set supportedStatementTypes; + + public DatabaseAdapterDatabaseDialectService(final String databaseType) { + Objects.requireNonNull(databaseType, "Database Type required"); + databaseAdapter = DatabaseAdapterDescriptor.getDatabaseAdapter(databaseType); + Objects.requireNonNull(databaseAdapter, "Database Adapter required"); + + final Set statementTypes = new LinkedHashSet<>(); + statementTypes.add(StatementType.ALTER); + statementTypes.add(StatementType.CREATE); + statementTypes.add(StatementType.SELECT); + + if (databaseAdapter.supportsInsertIgnore()) { + statementTypes.add(StatementType.INSERT_IGNORE); + } + if (databaseAdapter.supportsUpsert()) { + statementTypes.add(StatementType.UPSERT); + } + supportedStatementTypes = Collections.unmodifiableSet(statementTypes); + } + + @Override + public StatementResponse getStatement(final StatementRequest statementRequest) { + final StatementType statementType = statementRequest.statementType(); + + final TableDefinition tableDefinition = statementRequest.tableDefinition(); + final List columnNames = tableDefinition.columns() + .stream() + .map(ColumnDefinition::columnName) + .toList(); + final List primaryKeyColumnNames = tableDefinition.columns() + 
.stream() + .filter(ColumnDefinition::primaryKey) + .map(ColumnDefinition::columnName) + .toList(); + final List columnDescriptions = getColumnDescriptions(tableDefinition); + + final String sql; + + if (StatementType.ALTER == statementType) { + final List statements = databaseAdapter.getAlterTableStatements(tableDefinition.tableName(), columnDescriptions, true, true); + sql = statements.getFirst(); + } else if (StatementType.CREATE == statementType) { + final TableSchema tableSchema = getTableSchema(tableDefinition); + sql = databaseAdapter.getCreateTableStatement(tableSchema, false, false); + } else if (StatementType.UPSERT == statementType) { + sql = databaseAdapter.getUpsertStatement(tableDefinition.tableName(), columnNames, primaryKeyColumnNames); + } else if (StatementType.INSERT_IGNORE == statementType) { + sql = databaseAdapter.getInsertIgnoreStatement(tableDefinition.tableName(), columnNames, primaryKeyColumnNames); + } else if (StatementType.SELECT == statementType) { + sql = getSelectStatement(statementRequest); + } else { + throw new UnsupportedOperationException("Statement Type [%s] not supported".formatted(statementType)); + } + + return new StandardStatementResponse(sql); + } + + @Override + public Set getSupportedStatementTypes() { + return supportedStatementTypes; + } + + private String getSelectStatement(final StatementRequest statementRequest) { + if (statementRequest instanceof QueryStatementRequest queryStatementRequest) { + final TableDefinition tableDefinition = statementRequest.tableDefinition(); + final String qualifiedTableName = tableDefinition.tableName(); + final Optional derivedTableFound = queryStatementRequest.derivedTable(); + + final Optional whereQueryClause = queryStatementRequest.queryClauses().stream() + .filter(queryClause -> QueryClauseType.WHERE == queryClause.queryClauseType()) + .findFirst(); + final Optional orderByQueryClause = queryStatementRequest.queryClauses().stream() + .filter(queryClause -> QueryClauseType.ORDER_BY == queryClause.queryClauseType()) + .findFirst(); + + final String selectTableSql; + if (derivedTableFound.isPresent()) { + final String derivedTable = derivedTableFound.get(); + final String tableAlias = databaseAdapter.getTableAliasClause(qualifiedTableName); + selectTableSql = "SELECT * FROM (%s) %s".formatted(derivedTable, tableAlias); + } else { + final String tableColumns = getSelectTableColumns(tableDefinition.columns()); + + final Optional pageRequestFound = queryStatementRequest.pageRequest(); + final Long limit; + final Long offset; + final String indexColumnName; + if (pageRequestFound.isPresent()) { + final PageRequest pageRequest = pageRequestFound.get(); + limit = pageRequest.limit().isPresent() ? 
pageRequest.limit().getAsLong() : null; + offset = pageRequest.offset(); + indexColumnName = pageRequest.indexColumnName().orElse(null); + } else { + limit = null; + offset = null; + indexColumnName = null; + } + + final String whereSql = whereQueryClause.map(QueryClause::criteria).orElse(null); + final String orderBySql = orderByQueryClause.map(QueryClause::criteria).orElse(null); + + selectTableSql = databaseAdapter.getSelectStatement(qualifiedTableName, tableColumns, whereSql, orderBySql, limit, offset, indexColumnName); + } + + return selectTableSql; + } else { + throw new IllegalArgumentException("Query Statement Request not found [%s]".formatted(statementRequest.getClass())); + } + } + + private String getSelectTableColumns(final List columnDefinitions) { + final StringBuilder tableColumns = new StringBuilder(); + + final Iterator columns = columnDefinitions.iterator(); + while (columns.hasNext()) { + final ColumnDefinition columnDefinition = columns.next(); + final String columnName = columnDefinition.columnName(); + tableColumns.append(columnName); + + if (columns.hasNext()) { + tableColumns.append(COMMA_SEPARATOR); + tableColumns.append(SPACE_SEPARATOR); + } + } + + return tableColumns.toString(); + } + + private List getColumnDescriptions(final TableDefinition tableDefinition) { + return tableDefinition.columns().stream().map(columnDefinition -> + new ColumnDescription( + columnDefinition.columnName(), + columnDefinition.dataType(), + columnDefinition.primaryKey(), + COLUMN_SIZE_IGNORED, + columnDefinition.nullable() == ColumnDefinition.Nullable.YES + ) + ).toList(); + } + + private TableSchema getTableSchema(final TableDefinition tableDefinition) { + final List columnDescriptions = getColumnDescriptions(tableDefinition); + final Set primaryKeyColumnNames = tableDefinition.columns().stream() + .filter(ColumnDefinition::primaryKey) + .map(ColumnDefinition::columnName) + .collect(Collectors.toUnmodifiableSet()); + + return new TableSchema( + tableDefinition.catalog().orElse(null), + tableDefinition.schemaName().orElse(null), + tableDefinition.tableName(), + columnDescriptions, + false, + null, + primaryKeyColumnNames, + DOUBLE_QUOTE + ); + } +} diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/DatabaseDialectServiceDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/DatabaseDialectServiceDatabaseAdapter.java new file mode 100644 index 000000000000..6cb1fb06d299 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/DatabaseDialectServiceDatabaseAdapter.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.processors.standard.db.impl; + +/** + * Placeholder implementation to bridge between historical Database Adapter and Database Dialect Service + */ +public class DatabaseDialectServiceDatabaseAdapter extends GenericDatabaseAdapter { + public static final String NAME = "Database Dialect Service"; + + @Override + public String getName() { + return NAME; + } + + @Override + public String getDescription() { + return "Requires configuring a Database Dialect Service for SQL statements"; + } +} diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java index 851268eaecaf..6536709ecb0d 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java @@ -103,12 +103,6 @@ public String getSelectStatement(String tableName, String columnNames, String wh return query.toString(); } - @Override - public String unwrapIdentifier(String identifier) { - // Remove double quotes and square brackets. - return identifier == null ? null : identifier.replaceAll("[\"\\[\\]]", ""); - } - @Override public List getAlterTableStatements(final String tableName, final List columnsToAdd, final boolean quoteTableName, final boolean quoteColumnNames) { List columnsAndDatatypes = new ArrayList<>(columnsToAdd.size()); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java index 6e9e31732278..6b76b29a1e26 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java @@ -51,12 +51,6 @@ public String getDescription() { return "Generates MySQL compatible SQL"; } - @Override - public String unwrapIdentifier(String identifier) { - // Removes double quotes and back-ticks. - return identifier == null ? null : identifier.replaceAll("[\"`]", ""); - } - @Override public boolean supportsUpsert() { return true; @@ -67,17 +61,6 @@ public boolean supportsInsertIgnore() { return true; } - /** - * Tells How many times the column values need to be inserted into the prepared statement. Some DBs (such as MySQL) need the values specified twice in the statement, - * some need only to specify them once. - * - * @return An integer corresponding to the number of times to insert column values into the prepared statement for UPSERT, or -1 if upsert is not supported. 
- */ - @Override - public int getTimesToAddColumnObjectsForUpsert() { - return 2; - } - @Override public String getUpsertStatement(String table, List columnNames, Collection uniqueKeyColumnNames) { if (StringUtils.isEmpty(table)) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/resources/META-INF/services/org.apache.nifi.processors.standard.db.DatabaseAdapter b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/resources/META-INF/services/org.apache.nifi.processors.standard.db.DatabaseAdapter index 641223d21bbf..c0ba77b921c4 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/resources/META-INF/services/org.apache.nifi.processors.standard.db.DatabaseAdapter +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/resources/META-INF/services/org.apache.nifi.processors.standard.db.DatabaseAdapter @@ -12,6 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +org.apache.nifi.processors.standard.db.impl.DatabaseDialectServiceDatabaseAdapter org.apache.nifi.processors.standard.db.impl.GenericDatabaseAdapter org.apache.nifi.processors.standard.db.impl.OracleDatabaseAdapter org.apache.nifi.processors.standard.db.impl.Oracle12DatabaseAdapter diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/PutDatabaseRecordIT.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/PutDatabaseRecordIT.java index 1c2278e7542b..a0dbc204ac4a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/PutDatabaseRecordIT.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/PutDatabaseRecordIT.java @@ -56,6 +56,16 @@ public class PutDatabaseRecordIT { private final long NANOS_AFTER_SECOND = 351567000L; private final Instant INSTANT_MICROS_PRECISION = Instant.ofEpochMilli(MILLIS_TIMESTAMP_LONG).plusNanos(NANOS_AFTER_SECOND).minusMillis(MILLIS_TIMESTAMP_LONG % 1000); + private static final String SIMPLE_INPUT_RECORD = """ + { + "name": "John Doe", + "age": 50, + "favorite_color": "blue" + } + """; + + private static final String FAVORITE_COLOR_FIELD = "favorite_color"; + private static final String FAVORITE_COLOR = "blue"; private static PostgreSQLContainer postgres; private TestRunner runner; @@ -106,18 +116,36 @@ public void setup() throws InitializationException, SQLException { @Test public void testSimplePut() throws SQLException { - runner.enqueue(""" - { - "name": "John Doe", - "age": 50, - "favorite_color": "blue" - } - """); + runner.enqueue(SIMPLE_INPUT_RECORD); + runner.run(); + runner.assertAllFlowFilesTransferred(PutDatabaseRecord.REL_SUCCESS, 1); + + final Map results = getResults(); + assertEquals(FAVORITE_COLOR, results.get(FAVORITE_COLOR_FIELD)); + } + + @Test + public void testUpsert() throws SQLException { + runner.setProperty(PutDatabaseRecord.STATEMENT_TYPE, "UPSERT"); + + runner.enqueue(SIMPLE_INPUT_RECORD); + runner.run(); + runner.assertAllFlowFilesTransferred(PutDatabaseRecord.REL_SUCCESS, 1); + + final Map results = getResults(); + assertEquals(FAVORITE_COLOR, results.get(FAVORITE_COLOR_FIELD)); + } + + @Test + public void 
testInsertIgnore() throws SQLException { + runner.setProperty(PutDatabaseRecord.STATEMENT_TYPE, "INSERT_IGNORE"); + + runner.enqueue(SIMPLE_INPUT_RECORD); runner.run(); runner.assertAllFlowFilesTransferred(PutDatabaseRecord.REL_SUCCESS, 1); final Map results = getResults(); - assertEquals("blue", results.get("favorite_color")); + assertEquals(FAVORITE_COLOR, results.get(FAVORITE_COLOR_FIELD)); } @Test diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableIT.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableIT.java index 6173d9ee51b4..aefa4076812e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableIT.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableIT.java @@ -18,17 +18,11 @@ import org.apache.nifi.dbcp.DBCPConnectionPool; import org.apache.nifi.dbcp.utils.DBCPProperties; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.PostgreSQLDatabaseAdapter; import org.apache.nifi.reporting.InitializationException; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; import org.testcontainers.containers.PostgreSQLContainer; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - public class QueryDatabaseTableIT extends QueryDatabaseTableTest { private static PostgreSQLContainer postgres; @@ -48,8 +42,8 @@ public static void cleanUpAfterClass() { } @Override - public DatabaseAdapter createDatabaseAdapter() { - return new PostgreSQLDatabaseAdapter(); + public String getDatabaseType() { + return "PostgreSQL"; } @Override @@ -62,13 +56,4 @@ public void createDbcpControllerService() throws InitializationException { runner.setProperty(connectionPool, DBCPProperties.DB_DRIVERNAME, postgres.getDriverClassName()); runner.enableControllerService(connectionPool); } - - @Test - public void testAddedRowsAutoCommitTrue() { - // this test in the base class is not valid for PostgreSQL so check the validation error message. 
- final AssertionError assertionError = assertThrows(AssertionError.class, super::testAddedRowsAutoCommitTrue); - assertEquals(assertionError.getMessage(), "Processor has 1 validation failures:\n" + - "'Set Auto Commit' validated against 'true' is invalid because 'Set Auto Commit' " + - "must be set to 'false' because 'PostgreSQL' Database Type requires it to be 'false'\n"); - } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordIT.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordIT.java index a8e57c3d59fe..391cead1b5f2 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordIT.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordIT.java @@ -18,17 +18,11 @@ import org.apache.nifi.dbcp.DBCPConnectionPool; import org.apache.nifi.dbcp.utils.DBCPProperties; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.PostgreSQLDatabaseAdapter; import org.apache.nifi.reporting.InitializationException; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; import org.testcontainers.containers.PostgreSQLContainer; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - public class QueryDatabaseTableRecordIT extends QueryDatabaseTableRecordTest { private static PostgreSQLContainer postgres; @@ -48,8 +42,8 @@ public static void cleanUpAfterClass() { } @Override - public DatabaseAdapter createDatabaseAdapter() { - return new PostgreSQLDatabaseAdapter(); + public String getDatabaseType() { + return "PostgreSQL"; } @Override @@ -62,13 +56,4 @@ public void createDbcpControllerService() throws InitializationException { runner.setProperty(connectionPool, DBCPProperties.DB_DRIVERNAME, postgres.getDriverClassName()); runner.enableControllerService(connectionPool); } - - @Test - public void testAddedRowsAutoCommitTrue() { - // this test in the base class is not valid for PostgreSQL so check the validation error message. 
- final AssertionError assertionError = assertThrows(AssertionError.class, super::testAddedRowsAutoCommitTrue); - assertEquals(assertionError.getMessage(), "Processor has 1 validation failures:\n" + - "'Set Auto Commit' validated against 'true' is invalid because 'Set Auto Commit' " + - "must be set to 'false' because 'PostgreSQL' Database Type requires it to be 'false'\n"); - } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordTest.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordTest.java index 7bf922727c87..5ca58b4a2d3c 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordTest.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecordTest.java @@ -18,16 +18,9 @@ import org.apache.nifi.annotation.behavior.Stateful; import org.apache.nifi.components.state.Scope; -import org.apache.nifi.components.state.StateManager; import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.processor.exception.ProcessException; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.GenericDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.MSSQLDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.MySQLDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.OracleDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.PhoenixDatabaseAdapter; import org.apache.nifi.reporting.InitializationException; import org.apache.nifi.serialization.record.MockRecordWriter; import org.apache.nifi.util.MockFlowFile; @@ -47,17 +40,13 @@ import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; -import java.sql.Types; import java.time.LocalDateTime; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -72,8 +61,6 @@ public class QueryDatabaseTableRecordTest { MockQueryDatabaseTableRecord processor; protected TestRunner runner; private final static String DB_LOCATION = "target/db_qdt"; - private DatabaseAdapter dbAdapter; - private HashMap origDbAdapters; private final static String TABLE_NAME_KEY = "tableName"; private final static String MAX_ROWS_KEY = "maxRows"; @@ -108,8 +95,8 @@ public static void cleanUpAfterClass() { System.clearProperty("derby.stream.error.file"); } - public DatabaseAdapter createDatabaseAdapter() { - return new GenericDatabaseAdapter(); + public String getDatabaseType() { + return "Generic"; } public void createDbcpControllerService() throws InitializationException { @@ -121,14 +108,11 @@ public void createDbcpControllerService() throws InitializationException { @BeforeEach public void setup() throws InitializationException, IOException { - origDbAdapters = new 
HashMap<>(QueryDatabaseTableRecord.dbAdapters); - dbAdapter = createDatabaseAdapter(); - QueryDatabaseTableRecord.dbAdapters.put(dbAdapter.getName(), dbAdapter); processor = new MockQueryDatabaseTableRecord(); runner = TestRunners.newTestRunner(processor); createDbcpControllerService(); runner.setProperty(QueryDatabaseTableRecord.DBCP_SERVICE, "dbcp"); - runner.setProperty(QueryDatabaseTableRecord.DB_TYPE, dbAdapter.getName()); + runner.setProperty(QueryDatabaseTableRecord.DB_TYPE, getDatabaseType()); runner.getStateManager().clear(Scope.CLUSTER); MockRecordWriter recordWriter = new MockRecordWriter(null, true, -1); runner.addControllerService("writer", recordWriter); @@ -142,101 +126,6 @@ public void setup() throws InitializationException, IOException { public void teardown() throws IOException { runner.getStateManager().clear(Scope.CLUSTER); runner = null; - QueryDatabaseTableRecord.dbAdapters.clear(); - QueryDatabaseTableRecord.dbAdapters.putAll(origDbAdapters); - } - - @Test - public void testGetQuery() throws Exception { - String query = processor.getQuery(dbAdapter, "myTable", null, null, null, null); - assertEquals("SELECT * FROM myTable", query); - query = processor.getQuery(dbAdapter, "myTable", "col1,col2", null, null, null); - assertEquals("SELECT col1,col2 FROM myTable", query); - - query = processor.getQuery(dbAdapter, "myTable", null, Collections.singletonList("id"), null, null); - assertEquals("SELECT * FROM myTable", query); - - Map maxValues = new HashMap<>(); - maxValues.put("id", "509"); - StateManager stateManager = runner.getStateManager(); - stateManager.setState(maxValues, Scope.CLUSTER); - processor.putColumnType(AbstractDatabaseFetchProcessor.getStateKey("mytable", "id", dbAdapter), Types.INTEGER); - query = processor.getQuery(dbAdapter, "myTable", null, Collections.singletonList("id"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509", query); - - maxValues.put("date_created", "2016-03-07 12:34:56"); - stateManager.setState(maxValues, Scope.CLUSTER); - processor.putColumnType(AbstractDatabaseFetchProcessor.getStateKey("mytable", "date_created", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= '2016-03-07 12:34:56'", query); - - // Double quotes can be used to escape column and table names with most ANSI compatible database engines. - maxValues.put("mytable@!@date-created", "2016-03-07 12:34:56"); - stateManager.setState(maxValues, Scope.CLUSTER); - processor.putColumnType(AbstractDatabaseFetchProcessor.getStateKey("\"myTable\"", "\"DATE-CREATED\"", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "\"myTable\"", null, Arrays.asList("id", "\"DATE-CREATED\""), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM \"myTable\" WHERE id > 509 AND \"DATE-CREATED\" >= '2016-03-07 12:34:56'", query); - - // Back-ticks can be used to escape MySQL column and table names. 
- dbAdapter = new MySQLDatabaseAdapter(); - processor.putColumnType(AbstractDatabaseFetchProcessor.getStateKey("`myTable`", "`DATE-CREATED`", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "`myTable`", null, Arrays.asList("id", "`DATE-CREATED`"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM `myTable` WHERE id > 509 AND `DATE-CREATED` >= '2016-03-07 12:34:56'", query); - - // Square brackets can be used to escape Microsoft SQL Server column and table names. - dbAdapter = new MSSQLDatabaseAdapter(); - processor.putColumnType(AbstractDatabaseFetchProcessor.getStateKey("[myTable]", "[DATE-CREATED]", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "[myTable]", null, Arrays.asList("id", "[DATE-CREATED]"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM [myTable] WHERE id > 509 AND [DATE-CREATED] >= '2016-03-07 12:34:56'", query); - - // Test Oracle strategy - dbAdapter = new OracleDatabaseAdapter(); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= timestamp '2016-03-07 12:34:56' AND (type = \"CUSTOMER\")", query); - - // Test time. - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "time_created", Types.TIME); - maxValues.clear(); - maxValues.put("id", "509"); - maxValues.put("time_created", "12:34:57"); - maxValues.put("date_created", "2016-03-07 12:34:56"); - stateManager = runner.getStateManager(); - stateManager.clear(Scope.CLUSTER); - stateManager.setState(maxValues, Scope.CLUSTER); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= timestamp '2016-03-07 12:34:56' AND TIME_CREATED >= timestamp '12:34:57' AND (type = \"CUSTOMER\")", query); - dbAdapter = new GenericDatabaseAdapter(); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= '2016-03-07 12:34:56' AND TIME_CREATED >= '12:34:57' AND (type = \"CUSTOMER\")", query); - } - - @Test - public void testGetQueryUsingPhoenixAdapter() throws Exception { - Map maxValues = new HashMap<>(); - StateManager stateManager = runner.getStateManager(); - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "id", Types.INTEGER); - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "time_created", Types.TIME); - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "date_created", Types.TIMESTAMP); - - maxValues.put("id", "509"); - maxValues.put("time_created", "12:34:57"); - maxValues.put("date_created", "2016-03-07 12:34:56"); - stateManager.setState(maxValues, Scope.CLUSTER); - - dbAdapter = new PhoenixDatabaseAdapter(); - String query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= timestamp '2016-03-07 12:34:56' AND 
TIME_CREATED >= time '12:34:57' AND (type = \"CUSTOMER\")", query); - // Cover the other path - dbAdapter = new GenericDatabaseAdapter(); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= '2016-03-07 12:34:56' AND TIME_CREATED >= '12:34:57' AND (type = \"CUSTOMER\")", query); - } - - @Test - public void testGetQueryNoTable() { - assertThrows(IllegalArgumentException.class, () -> { - processor.getQuery(dbAdapter, null, null, null, null, null); - }); } @Test @@ -663,38 +552,6 @@ public void testWithNullIntColumn() throws SQLException { runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).get(0).assertAttributeEquals(QueryDatabaseTableRecord.RESULT_ROW_COUNT, "2"); } - @Test - public void testWithRuntimeException() throws SQLException { - // load test data to database - final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); - Statement stmt = con.createStatement(); - - try { - stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { - // Ignore, usually due to Derby not having DROP TABLE IF EXISTS - } - - stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); - - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (0, NULL, 1)"); - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (1, 1, 1)"); - - runner.setIncomingConnection(false); - runner.setProperty(QueryDatabaseTableRecord.TABLE_NAME, "TEST_NULL_INT"); - runner.setProperty(AbstractDatabaseFetchProcessor.MAX_VALUE_COLUMN_NAMES, "id"); - - QueryDatabaseTableRecord.dbAdapters.put(dbAdapter.getName(), new GenericDatabaseAdapter() { - @Override - public String getName() { - throw new RuntimeException("test"); - } - }); - runner.run(); - - assertTrue(runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).isEmpty()); - } - @Test public void testWithSqlException() throws SQLException { // load test data to database @@ -1429,52 +1286,6 @@ public void testMissingColumn() throws ProcessException, SQLException { }); } - @Test - public void testWithExceptionAfterSomeRowsProcessed() throws SQLException { - // load test data to database - final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); - Statement stmt = con.createStatement(); - - try { - stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { - // Ignore, usually due to Derby not having DROP TABLE IF EXISTS - } - - stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); - - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (1, NULL, 1)"); - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (2, 1, 1)"); - - runner.setIncomingConnection(false); - runner.setProperty(QueryDatabaseTableRecord.TABLE_NAME, "TEST_NULL_INT"); - runner.setProperty(AbstractDatabaseFetchProcessor.MAX_VALUE_COLUMN_NAMES, "id"); - - // Override adapter with one that fails after the first row is processed - QueryDatabaseTableRecord.dbAdapters.put(dbAdapter.getName(), new GenericDatabaseAdapter() { - boolean fail = false; - - @Override - public String getName() { - if (!fail) { - fail = true; - return super.getName(); - } - throw new RuntimeException("test"); - } - }); - 
runner.run(); - assertTrue(runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).isEmpty()); - // State should not have been updated - runner.getStateManager().assertStateNotSet("test_null_int@!@id", Scope.CLUSTER); - - // Restore original (working) adapter and run again - QueryDatabaseTableRecord.dbAdapters.put(dbAdapter.getName(), dbAdapter); - runner.run(); - assertFalse(runner.getFlowFilesForRelationship(QueryDatabaseTableRecord.REL_SUCCESS).isEmpty()); - runner.getStateManager().assertStateEquals("test_null_int@!@id", "2", Scope.CLUSTER); - } - /** * Simple implementation only for QueryDatabaseTableRecord processor testing. */ diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableTest.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableTest.java index e5e7daa27cda..ae3fb59853d5 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableTest.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/QueryDatabaseTableTest.java @@ -17,22 +17,14 @@ package org.apache.nifi.processors.standard; import org.apache.avro.file.DataFileStream; -import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumReader; import org.apache.nifi.annotation.behavior.Stateful; import org.apache.nifi.components.state.Scope; -import org.apache.nifi.components.state.StateManager; import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.processor.exception.ProcessException; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.GenericDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.MSSQLDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.MySQLDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.OracleDatabaseAdapter; -import org.apache.nifi.processors.standard.db.impl.PhoenixDatabaseAdapter; import org.apache.nifi.reporting.InitializationException; import org.apache.nifi.util.MockFlowFile; import org.apache.nifi.util.TestRunner; @@ -52,17 +44,13 @@ import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; -import java.sql.Types; import java.time.LocalDateTime; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -75,8 +63,6 @@ public class QueryDatabaseTableTest { MockQueryDatabaseTable processor; protected TestRunner runner; private final static String DB_LOCATION = "target/db_qdt"; - private DatabaseAdapter dbAdapter; - private HashMap origDbAdapters; private final static String TABLE_NAME_KEY = "tableName"; private final static String MAX_ROWS_KEY = "maxRows"; @@ -112,10 +98,6 @@ public static void 
cleanUpAfterClass() { System.clearProperty("derby.stream.error.file"); } - public DatabaseAdapter createDatabaseAdapter() { - return new GenericDatabaseAdapter(); - } - public void createDbcpControllerService() throws InitializationException { final DBCPService dbcp = new DBCPServiceSimpleImpl(); final Map dbcpProperties = new HashMap<>(); @@ -125,120 +107,26 @@ public void createDbcpControllerService() throws InitializationException { @BeforeEach public void setup() throws InitializationException, IOException { - origDbAdapters = new HashMap<>(QueryDatabaseTable.dbAdapters); - dbAdapter = createDatabaseAdapter(); - QueryDatabaseTable.dbAdapters.put(dbAdapter.getName(), dbAdapter); processor = new MockQueryDatabaseTable(); runner = TestRunners.newTestRunner(processor); createDbcpControllerService(); runner.setProperty(QueryDatabaseTable.DBCP_SERVICE, "dbcp"); - runner.setProperty(QueryDatabaseTable.DB_TYPE, dbAdapter.getName()); + runner.setProperty(QueryDatabaseTable.DB_TYPE, getDatabaseType()); runner.getStateManager().clear(Scope.CLUSTER); } + public String getDatabaseType() { + return "Generic"; + } + @AfterEach public void teardown() throws IOException { runner.getStateManager().clear(Scope.CLUSTER); runner = null; - QueryDatabaseTable.dbAdapters.clear(); - QueryDatabaseTable.dbAdapters.putAll(origDbAdapters); - } - - @Test - public void testGetQuery() throws Exception { - String query = processor.getQuery(dbAdapter, "myTable", null, null, null, null); - assertEquals("SELECT * FROM myTable", query); - query = processor.getQuery(dbAdapter, "myTable", "col1,col2", null, null, null); - assertEquals("SELECT col1,col2 FROM myTable", query); - - query = processor.getQuery(dbAdapter, "myTable", null, Collections.singletonList("id"), null, null); - assertEquals("SELECT * FROM myTable", query); - - Map maxValues = new HashMap<>(); - maxValues.put("id", "509"); - StateManager stateManager = runner.getStateManager(); - stateManager.setState(maxValues, Scope.CLUSTER); - processor.putColumnType(processor.getStateKey("mytable", "id", dbAdapter), Types.INTEGER); - query = processor.getQuery(dbAdapter, "myTable", null, Collections.singletonList("id"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509", query); - - maxValues.put("date_created", "2016-03-07 12:34:56"); - stateManager.setState(maxValues, Scope.CLUSTER); - processor.putColumnType(processor.getStateKey("mytable", "date_created", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= '2016-03-07 12:34:56'", query); - - // Double quotes can be used to escape column and table names with most ANSI compatible database engines. - maxValues.put("mytable@!@date-created", "2016-03-07 12:34:56"); - stateManager.setState(maxValues, Scope.CLUSTER); - processor.putColumnType(processor.getStateKey("\"myTable\"", "\"DATE-CREATED\"", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "\"myTable\"", null, Arrays.asList("id", "\"DATE-CREATED\""), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM \"myTable\" WHERE id > 509 AND \"DATE-CREATED\" >= '2016-03-07 12:34:56'", query); - - // Back-ticks can be used to escape MySQL column and table names. 
- dbAdapter = new MySQLDatabaseAdapter(); - processor.putColumnType(processor.getStateKey("`myTable`", "`DATE-CREATED`", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "`myTable`", null, Arrays.asList("id", "`DATE-CREATED`"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM `myTable` WHERE id > 509 AND `DATE-CREATED` >= '2016-03-07 12:34:56'", query); - - // Square brackets can be used to escape Microsoft SQL Server column and table names. - dbAdapter = new MSSQLDatabaseAdapter(); - processor.putColumnType(processor.getStateKey("[myTable]", "[DATE-CREATED]", dbAdapter), Types.TIMESTAMP); - query = processor.getQuery(dbAdapter, "[myTable]", null, Arrays.asList("id", "[DATE-CREATED]"), null, stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM [myTable] WHERE id > 509 AND [DATE-CREATED] >= '2016-03-07 12:34:56'", query); - - // Test Oracle strategy - dbAdapter = new OracleDatabaseAdapter(); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= timestamp '2016-03-07 12:34:56' AND (type = \"CUSTOMER\")", query); - - // Test time. - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "time_created", Types.TIME); - maxValues.clear(); - maxValues.put("id", "509"); - maxValues.put("time_created", "12:34:57"); - maxValues.put("date_created", "2016-03-07 12:34:56"); - stateManager = runner.getStateManager(); - stateManager.clear(Scope.CLUSTER); - stateManager.setState(maxValues, Scope.CLUSTER); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= timestamp '2016-03-07 12:34:56' AND TIME_CREATED >= timestamp '12:34:57' AND (type = \"CUSTOMER\")", query); - dbAdapter = new GenericDatabaseAdapter(); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= '2016-03-07 12:34:56' AND TIME_CREATED >= '12:34:57' AND (type = \"CUSTOMER\")", query); - } - - @Test - public void testGetQueryUsingPhoenixAdapter() throws Exception { - Map maxValues = new HashMap<>(); - StateManager stateManager = runner.getStateManager(); - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "id", Types.INTEGER); - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "time_created", Types.TIME); - processor.putColumnType("mytable" + AbstractDatabaseFetchProcessor.NAMESPACE_DELIMITER + "date_created", Types.TIMESTAMP); - - maxValues.put("id", "509"); - maxValues.put("time_created", "12:34:57"); - maxValues.put("date_created", "2016-03-07 12:34:56"); - stateManager.setState(maxValues, Scope.CLUSTER); - - dbAdapter = new PhoenixDatabaseAdapter(); - String query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= timestamp '2016-03-07 12:34:56' AND TIME_CREATED >= time '12:34:57' AND (type 
= \"CUSTOMER\")", query); - // Cover the other path - dbAdapter = new GenericDatabaseAdapter(); - query = processor.getQuery(dbAdapter, "myTable", null, Arrays.asList("id", "DATE_CREATED", "TIME_CREATED"), "type = \"CUSTOMER\"", stateManager.getState(Scope.CLUSTER).toMap()); - assertEquals("SELECT * FROM myTable WHERE id > 509 AND DATE_CREATED >= '2016-03-07 12:34:56' AND TIME_CREATED >= '12:34:57' AND (type = \"CUSTOMER\")", query); - } - - @Test - public void testGetQueryNoTable() { - assertThrows(IllegalArgumentException.class, () -> { - processor.getQuery(dbAdapter, null, null, null, null, null); - }); } @Test - public void testAddedRows() throws ClassNotFoundException, SQLException, InitializationException, IOException { + public void testAddedRows() throws SQLException, IOException { // load test data to database final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); @@ -675,38 +563,6 @@ public void testWithNullIntColumn() throws SQLException { runner.getFlowFilesForRelationship(QueryDatabaseTable.REL_SUCCESS).get(0).assertAttributeEquals(QueryDatabaseTable.RESULT_ROW_COUNT, "2"); } - @Test - public void testWithRuntimeException() throws SQLException { - // load test data to database - final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); - Statement stmt = con.createStatement(); - - try { - stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { - // Ignore, usually due to Derby not having DROP TABLE IF EXISTS - } - - stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); - - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (0, NULL, 1)"); - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (1, 1, 1)"); - - runner.setIncomingConnection(false); - runner.setProperty(QueryDatabaseTable.TABLE_NAME, "TEST_NULL_INT"); - runner.setProperty(AbstractDatabaseFetchProcessor.MAX_VALUE_COLUMN_NAMES, "id"); - - QueryDatabaseTable.dbAdapters.put(dbAdapter.getName(), new GenericDatabaseAdapter() { - @Override - public String getName() { - throw new DataFileWriter.AppendWriteException(null); - } - }); - runner.run(); - - assertTrue(runner.getFlowFilesForRelationship(QueryDatabaseTable.REL_SUCCESS).isEmpty()); - } - @Test public void testWithSqlException() throws SQLException { // load test data to database @@ -1462,51 +1318,6 @@ public void testMissingColumn() throws ProcessException, ClassNotFoundException, }); } - @Test - public void testWithExceptionAfterSomeRowsProcessed() throws SQLException { - // load test data to database - final Connection con = ((DBCPService) runner.getControllerService("dbcp")).getConnection(); - Statement stmt = con.createStatement(); - - try { - stmt.execute("drop table TEST_NULL_INT"); - } catch (final SQLException sqle) { - // Ignore, usually due to Derby not having DROP TABLE IF EXISTS - } - - stmt.execute("create table TEST_NULL_INT (id integer not null, val1 integer, val2 integer, constraint my_pk primary key (id))"); - - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (1, NULL, 1)"); - stmt.execute("insert into TEST_NULL_INT (id, val1, val2) VALUES (2, 1, 1)"); - - runner.setIncomingConnection(false); - runner.setProperty(QueryDatabaseTable.TABLE_NAME, "TEST_NULL_INT"); - runner.setProperty(AbstractDatabaseFetchProcessor.MAX_VALUE_COLUMN_NAMES, "id"); - - // Override adapter with one that fails after the first row is processed - 
QueryDatabaseTable.dbAdapters.put(dbAdapter.getName(), new GenericDatabaseAdapter() { - boolean fail = false; - @Override - public String getName() { - if (!fail) { - fail = true; - return super.getName(); - } - throw new DataFileWriter.AppendWriteException(null); - } - }); - runner.run(); - assertTrue(runner.getFlowFilesForRelationship(QueryDatabaseTable.REL_SUCCESS).isEmpty()); - // State should not have been updated - runner.getStateManager().assertStateNotSet("test_null_int@!@id", Scope.CLUSTER); - - // Restore original (working) adapter and run again - QueryDatabaseTable.dbAdapters.put(dbAdapter.getName(), dbAdapter); - runner.run(); - assertFalse(runner.getFlowFilesForRelationship(QueryDatabaseTable.REL_SUCCESS).isEmpty()); - runner.getStateManager().assertStateEquals("test_null_int@!@id", "2", Scope.CLUSTER); - } - private long getNumberOfRecordsFromStream(InputStream in) throws IOException { final DatumReader datumReader = new GenericDatumReader<>(); try (DataFileStream dataFileReader = new DataFileStream<>(in, datumReader)) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java index f30fdba60a8f..4722745fc69e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGenerateTableFetch.java @@ -22,7 +22,6 @@ import org.apache.nifi.controller.AbstractControllerService; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.processor.exception.ProcessException; -import org.apache.nifi.processors.standard.db.impl.DerbyDatabaseAdapter; import org.apache.nifi.util.MockFlowFile; import org.apache.nifi.util.MockProcessSession; import org.apache.nifi.util.MockSessionFactory; @@ -119,7 +118,7 @@ public void setUp() throws Exception { runner.addControllerService("dbcp", dbcp, dbcpProperties); runner.enableControllerService(dbcp); runner.setProperty(GenerateTableFetch.DBCP_SERVICE, "dbcp"); - runner.setProperty(DB_TYPE, new DerbyDatabaseAdapter().getName()); + runner.setProperty(DB_TYPE, "Derby"); } @Test diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQL2008DatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQL2008DatabaseAdapter.java index 5ed921b06bfe..33c5f4af558f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQL2008DatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQL2008DatabaseAdapter.java @@ -16,14 +16,13 @@ */ package org.apache.nifi.processors.standard.db.impl; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; public class TestMSSQL2008DatabaseAdapter { - private final DatabaseAdapter db = new MSSQL2008DatabaseAdapter(); 
+ private final MSSQL2008DatabaseAdapter db = new MSSQL2008DatabaseAdapter(); @Test public void testGeneration() { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQLDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQLDatabaseAdapter.java index dea494e937b5..e492b00b10a0 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQLDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestMSSQLDatabaseAdapter.java @@ -16,14 +16,13 @@ */ package org.apache.nifi.processors.standard.db.impl; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; public class TestMSSQLDatabaseAdapter { - final DatabaseAdapter db = new MSSQLDatabaseAdapter(); + final MSSQLDatabaseAdapter db = new MSSQLDatabaseAdapter(); @Test public void testGeneration() { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracle12DatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracle12DatabaseAdapter.java index 1409f253a085..0f24374f989f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracle12DatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracle12DatabaseAdapter.java @@ -25,7 +25,6 @@ import org.apache.nifi.processors.standard.db.ColumnDescription; import org.apache.nifi.processors.standard.db.NameNormalizer; import org.apache.nifi.processors.standard.db.NameNormalizerFactory; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; import org.apache.nifi.processors.standard.db.TableSchema; import org.apache.nifi.processors.standard.db.TranslationStrategy; import org.junit.jupiter.api.Test; @@ -36,7 +35,7 @@ public class TestOracle12DatabaseAdapter { - private final DatabaseAdapter db = new Oracle12DatabaseAdapter(); + private final Oracle12DatabaseAdapter db = new Oracle12DatabaseAdapter(); @Test public void testGeneration() { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracleDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracleDatabaseAdapter.java index 99b8e3e3bd8a..a2e8dd13d259 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracleDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/db/impl/TestOracleDatabaseAdapter.java @@ -19,7 +19,6 @@ import org.apache.nifi.processors.standard.db.ColumnDescription; import org.apache.nifi.processors.standard.db.NameNormalizer; import 
org.apache.nifi.processors.standard.db.NameNormalizerFactory; -import org.apache.nifi.processors.standard.db.DatabaseAdapter; import org.apache.nifi.processors.standard.db.TableSchema; import org.apache.nifi.processors.standard.db.TranslationStrategy; import org.junit.jupiter.api.Test; @@ -35,7 +34,7 @@ public class TestOracleDatabaseAdapter { - private final DatabaseAdapter db = new OracleDatabaseAdapter(); + private final OracleDatabaseAdapter db = new OracleDatabaseAdapter(); @Test public void testGeneration() { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/resources/PutDatabaseRecordIT/create-person-table.sql b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/resources/PutDatabaseRecordIT/create-person-table.sql index eed14bb75ed7..162e15d925e3 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/resources/PutDatabaseRecordIT/create-person-table.sql +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/resources/PutDatabaseRecordIT/create-person-table.sql @@ -1,5 +1,5 @@ CREATE TABLE person ( - name VARCHAR(255) NOT NULL, + name VARCHAR(255) NOT NULL PRIMARY KEY, age INT, favorite_color VARCHAR(255), dob DATE, diff --git a/nifi-extension-bundles/nifi-standard-services-api-bom/pom.xml b/nifi-extension-bundles/nifi-standard-services-api-bom/pom.xml index 22c0ac2af8cb..d4e6690387ff 100644 --- a/nifi-extension-bundles/nifi-standard-services-api-bom/pom.xml +++ b/nifi-extension-bundles/nifi-standard-services-api-bom/pom.xml @@ -72,6 +72,12 @@ 2.2.0-SNAPSHOT provided
+ + org.apache.nifi + nifi-database-dialect-service-api + 2.2.0-SNAPSHOT + provided + org.apache.nifi nifi-dbcp-service-api diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/pom.xml b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/pom.xml new file mode 100644 index 000000000000..b4e35023d519 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/pom.xml @@ -0,0 +1,24 @@ + + + + 4.0.0 + + org.apache.nifi + nifi-standard-services + 2.2.0-SNAPSHOT + + nifi-database-dialect-service-api + diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/ColumnDefinition.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/ColumnDefinition.java new file mode 100644 index 000000000000..ce5973e9a6ec --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/ColumnDefinition.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.database.dialect.service.api; + +import java.util.Optional; + +/** + * Database Table Column Definition + */ +public interface ColumnDefinition { + String columnName(); + + int dataType(); + + Nullable nullable(); + + Optional defaultValue(); + + boolean primaryKey(); + + enum Nullable { + YES, + + NO, + + UNKNOWN + } +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/DatabaseDialectService.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/DatabaseDialectService.java new file mode 100644 index 000000000000..c5fa1537e45e --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/DatabaseDialectService.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.database.dialect.service.api; + +import org.apache.nifi.controller.ControllerService; + +import java.util.Set; + +/** + * Abstraction responsible for returning SQL statements and attributes specific to database services + */ +public interface DatabaseDialectService extends ControllerService { + /** + * Get SQL Statement based on request properties + * + * @param statementRequest Statement request + * @return Statement Response containing rendered SQL + */ + StatementResponse getStatement(StatementRequest statementRequest); + + /** + * Get SQL Statement Types supported in the Database Dialect Service + * + * @return Set of supported SQL Statement Types + */ + Set getSupportedStatementTypes(); +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/PageRequest.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/PageRequest.java new file mode 100644 index 000000000000..a50c71c88c8f --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/PageRequest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +import java.util.Optional; +import java.util.OptionalLong; + +/** + * Request for page of results with starting offset and total limit with optional index column + */ +public interface PageRequest { + long offset(); + + OptionalLong limit(); + + Optional indexColumnName(); +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryClause.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryClause.java new file mode 100644 index 000000000000..97ecfff16867 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryClause.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.database.dialect.service.api; + +/** + * Query Clause definition containing Clause Type and associated criteria + * + * @param queryClauseType Query Clause Type + * @param criteria Criteria associated with Query Clause Type + */ +public record QueryClause( + QueryClauseType queryClauseType, + String criteria +) { +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryClauseType.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryClauseType.java new file mode 100644 index 000000000000..dfdeb6af9e57 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryClauseType.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +/** + * SQL Query Clause Type for building SELECT statements + */ +public enum QueryClauseType { + WHERE, + + ORDER_BY +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryStatementRequest.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryStatementRequest.java new file mode 100644 index 000000000000..3601cb1b9186 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/QueryStatementRequest.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.database.dialect.service.api; + +import java.util.Collection; +import java.util.Optional; + +/** + * Query extension of Statement Request with additional properties for SELECT statements + */ +public interface QueryStatementRequest extends StatementRequest { + Optional derivedTable(); + + Collection queryClauses(); + + Optional pageRequest(); +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardColumnDefinition.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardColumnDefinition.java new file mode 100644 index 000000000000..b8c41797a51a --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardColumnDefinition.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +import java.sql.Types; +import java.util.Optional; + +/** + * Standard Database Table Column definition + * + * @param columnName Table Column Name + * @param dataType SQL type from java.sql.Types + * @param nullable Nullable status corresponding to java.sql.DatabaseMetaData IS_NULLABLE + * @param defaultValue Default column value may be empty + * @param primaryKey Primary Key status + */ +public record StandardColumnDefinition( + String columnName, + int dataType, + Nullable nullable, + Optional defaultValue, + boolean primaryKey +) implements ColumnDefinition { + /** + * Standard Column Definition with Column Name and defaults for other properties for queries + * + * @param columnName Table Column Name + */ + public StandardColumnDefinition(final String columnName) { + this(columnName, Types.VARCHAR, Nullable.UNKNOWN, Optional.empty(), false); + } +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardPageRequest.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardPageRequest.java new file mode 100644 index 000000000000..a21ebb8ed811 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardPageRequest.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +import java.util.Optional; +import java.util.OptionalLong; + +/** + * Standard record implementation of Page Request + * + * @param offset Offset starting index of requested page of results + * @param limit Limit total number of results + * @param indexColumnName Column Name for indexing results or empty + */ +public record StandardPageRequest( + long offset, + OptionalLong limit, + Optional indexColumnName +) implements PageRequest { +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardQueryStatementRequest.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardQueryStatementRequest.java new file mode 100644 index 000000000000..58b00846bfc3 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardQueryStatementRequest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +import java.util.Collection; +import java.util.List; +import java.util.Optional; + +/** + * Standard implementation of Query Statement Request with required properties + * + * @param statementType SQL Statement Type + * @param tableDefinition Database Table Definition + * @param derivedTable Derived Table Query or empty when not defined + * @param queryClauses Collection of Query Clauses can be empty + * @param pageRequest Page Request can be empty + */ +public record StandardQueryStatementRequest( + StatementType statementType, + TableDefinition tableDefinition, + Optional derivedTable, + Collection queryClauses, + Optional pageRequest +) implements QueryStatementRequest { + /** + * Standard Query Statement Request without additional clauses + * + * @param statementType Statement Type + * @param tableDefinition Database Table Definition + */ + public StandardQueryStatementRequest(final StatementType statementType, final TableDefinition tableDefinition) { + this(statementType, tableDefinition, Optional.empty(), List.of(), Optional.empty()); + } +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardStatementRequest.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardStatementRequest.java new file mode 100644 index 000000000000..8cf6bb187bad --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardStatementRequest.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +/** + * Standard implementation of Statement Request with required properties + * + * @param statementType SQL Statement Type + * @param tableDefinition Database Table Definition + */ +public record StandardStatementRequest( + StatementType statementType, + TableDefinition tableDefinition +) implements StatementRequest { +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardStatementResponse.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardStatementResponse.java new file mode 100644 index 000000000000..d3bb976c4390 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StandardStatementResponse.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.database.dialect.service.api; + +/** + * Standard implementation of Statement Response with required properties + * + * @param sql SQL Statement + */ +public record StandardStatementResponse( + String sql +) implements StatementResponse { +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementRequest.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementRequest.java new file mode 100644 index 000000000000..bea151aa3cfb --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementRequest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +/** + * SQL Statement Request abstraction with minimum properties + */ +public interface StatementRequest { + /** + * SQL Statement Type requested + * + * @return SQL Statement Type + */ + StatementType statementType(); + + /** + * Database Table Definition for requested operation + * + * @return Database Table Definition + */ + TableDefinition tableDefinition(); +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementResponse.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementResponse.java new file mode 100644 index 000000000000..920b839c1d40 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementResponse.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.database.dialect.service.api; + +/** + * SQL Statement Response with standard properties + */ +public interface StatementResponse { + /** + * Rendered SQL statement + * + * @return SQL statement + */ + String sql(); +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementType.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementType.java new file mode 100644 index 000000000000..4e3593f7fb73 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/StatementType.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +/** + * SQL Statement Types including synthetic types for conditional handling + */ +public enum StatementType { + /** Alter table with new columns */ + ALTER, + /** Create table with specified columns */ + CREATE, + /** Insert records and ignore existing records with matching duplicate keys */ + INSERT_IGNORE, + /** Select records */ + SELECT, + /** Insert records or update records on matching duplicate keys */ + UPSERT +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/TableDefinition.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/TableDefinition.java new file mode 100644 index 000000000000..381a550667ff --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-api/src/main/java/org/apache/nifi/database/dialect/service/api/TableDefinition.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service.api; + +import java.util.List; +import java.util.Optional; + +/** + * Database Table definition + * + * @param catalog Database Catalog Name may be empty + * @param schemaName Database Schema Name may be empty + * @param tableName Database Table Name is required + * @param columns Database Table Column definitions + */ +public record TableDefinition( + Optional catalog, + Optional schemaName, + String tableName, + List columns +) { +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service-nar/pom.xml b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service-nar/pom.xml new file mode 100644 index 000000000000..c36175f3f644 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service-nar/pom.xml @@ -0,0 +1,39 @@ + + + + 4.0.0 + + org.apache.nifi + nifi-database-dialect-service-bundle + 2.2.0-SNAPSHOT + + nifi-database-dialect-service-nar + nar + + + + org.apache.nifi + nifi-database-dialect-service + 2.2.0-SNAPSHOT + + + org.apache.nifi + nifi-standard-services-api-nar + 2.2.0-SNAPSHOT + nar + + + diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/pom.xml b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/pom.xml new file mode 100644 index 000000000000..45879385c832 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/pom.xml @@ -0,0 +1,31 @@ + + + + 4.0.0 + + org.apache.nifi + nifi-database-dialect-service-bundle + 2.2.0-SNAPSHOT + + nifi-database-dialect-service + jar + + + org.apache.nifi + nifi-database-dialect-service-api + + + diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/main/java/org/apache/nifi/database/dialect/service/StandardDatabaseDialectService.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/main/java/org/apache/nifi/database/dialect/service/StandardDatabaseDialectService.java new file mode 100644 index 000000000000..382f537ccac9 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/main/java/org/apache/nifi/database/dialect/service/StandardDatabaseDialectService.java @@ -0,0 +1,326 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.nifi.database.dialect.service; + +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.controller.AbstractControllerService; +import org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; +import org.apache.nifi.database.dialect.service.api.PageRequest; +import org.apache.nifi.database.dialect.service.api.QueryClause; +import org.apache.nifi.database.dialect.service.api.QueryClauseType; +import org.apache.nifi.database.dialect.service.api.QueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StandardStatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; + +import java.sql.JDBCType; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.OptionalLong; +import java.util.Set; + +@CapabilityDescription(""" + Database Dialect Service supporting ANSI SQL. + Supported Statement Types: ALTER, CREATE, SELECT +""" +) +@Tags({ "Relational", "Database", "JDBC", "SQL" }) +public class StandardDatabaseDialectService extends AbstractControllerService implements DatabaseDialectService { + private static final char PERIOD_SEPARATOR = '.'; + + private static final char SPACE_SEPARATOR = ' '; + + private static final char COMMA_SEPARATOR = ','; + + private static final char ASTERISK_CHARACTER = '*'; + + private static final String NOT_NULL_QUALIFIER = "NOT NULL"; + + private static final String PRIMARY_KEY_QUALIFIER = "PRIMARY KEY"; + + private static final String WHERE_KEYWORD = "WHERE"; + + private static final String ORDER_BY_KEYWORD = "ORDER BY"; + + private static final String AND_KEYWORD = "AND"; + + private static final String LIMIT_KEYWORD = "LIMIT"; + + private static final String OFFSET_KEYWORD = "OFFSET"; + + private static final String GREATER_THAN_OR_EQUAL = ">="; + + private static final String LESS_THAN = "<"; + + private static final Set supportedStatementTypes = Set.of( + StatementType.ALTER, + StatementType.CREATE, + StatementType.SELECT + ); + + @Override + public StatementResponse getStatement(final StatementRequest statementRequest) { + Objects.requireNonNull(statementRequest, "Statement Request required"); + return getSupportedStatement(statementRequest); + } + + @Override + public Set getSupportedStatementTypes() { + return supportedStatementTypes; + } + + private StatementResponse getSupportedStatement(final StatementRequest statementRequest) { + final StatementType statementType = statementRequest.statementType(); + return switch (statementType) { + case ALTER -> getAlterStatement(statementRequest); + case CREATE -> getCreateStatement(statementRequest); + case SELECT -> getSelectStatement(statementRequest); + default -> throw new UnsupportedOperationException("Statement Type [%s] not handled".formatted(statementType)); + }; + } + + private StatementResponse getAlterStatement(final StatementRequest statementRequest) { + final TableDefinition tableDefinition = statementRequest.tableDefinition(); + final String qualifiedTableName = getQualifiedTableName(tableDefinition); + final String tableColumns = getAlterTableColumns(tableDefinition.columns()); + + 
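+ // Render an ALTER TABLE ... ADD COLUMNS statement from the qualified table name and the formatted column list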
final String sql = "ALTER TABLE %s ADD COLUMNS (%s)".formatted(qualifiedTableName, tableColumns); + return new StandardStatementResponse(sql); + } + + private String getAlterTableColumns(final List columnDefinitions) { + final StringBuilder tableColumns = new StringBuilder(); + + final Iterator columns = columnDefinitions.iterator(); + while (columns.hasNext()) { + final ColumnDefinition columnDefinition = columns.next(); + final String columnName = columnDefinition.columnName(); + final String jdbcTypeName = getJdbcTypeName(columnDefinition); + tableColumns.append(columnName); + tableColumns.append(SPACE_SEPARATOR); + tableColumns.append(jdbcTypeName); + + if (ColumnDefinition.Nullable.NO == columnDefinition.nullable()) { + tableColumns.append(SPACE_SEPARATOR); + tableColumns.append(NOT_NULL_QUALIFIER); + } + + if (columns.hasNext()) { + tableColumns.append(COMMA_SEPARATOR); + tableColumns.append(SPACE_SEPARATOR); + } + } + + return tableColumns.toString(); + } + + private StatementResponse getCreateStatement(final StatementRequest statementRequest) { + final TableDefinition tableDefinition = statementRequest.tableDefinition(); + final String qualifiedTableName = getQualifiedTableName(tableDefinition); + final String tableColumns = getCreateTableColumns(tableDefinition.columns()); + + final String sql = "CREATE TABLE %s (%s)".formatted(qualifiedTableName, tableColumns); + return new StandardStatementResponse(sql); + } + + private String getCreateTableColumns(final List columnDefinitions) { + final StringBuilder tableColumns = new StringBuilder(); + + final Iterator columns = columnDefinitions.iterator(); + while (columns.hasNext()) { + final ColumnDefinition columnDefinition = columns.next(); + final String columnName = columnDefinition.columnName(); + final String jdbcTypeName = getJdbcTypeName(columnDefinition); + tableColumns.append(columnName); + tableColumns.append(SPACE_SEPARATOR); + tableColumns.append(jdbcTypeName); + + if (ColumnDefinition.Nullable.NO == columnDefinition.nullable()) { + tableColumns.append(SPACE_SEPARATOR); + tableColumns.append(NOT_NULL_QUALIFIER); + } + + if (columnDefinition.primaryKey()) { + tableColumns.append(SPACE_SEPARATOR); + tableColumns.append(PRIMARY_KEY_QUALIFIER); + } + + if (columns.hasNext()) { + tableColumns.append(COMMA_SEPARATOR); + tableColumns.append(SPACE_SEPARATOR); + } + } + + return tableColumns.toString(); + } + + private String getSelectTableColumns(final List columnDefinitions) { + final StringBuilder tableColumns = new StringBuilder(); + + final Iterator columns = columnDefinitions.iterator(); + if (columns.hasNext()) { + while (columns.hasNext()) { + final ColumnDefinition columnDefinition = columns.next(); + final String columnName = columnDefinition.columnName(); + tableColumns.append(columnName); + + if (columns.hasNext()) { + tableColumns.append(COMMA_SEPARATOR); + tableColumns.append(SPACE_SEPARATOR); + } + } + } else { + tableColumns.append(ASTERISK_CHARACTER); + } + + return tableColumns.toString(); + } + + private StatementResponse getSelectStatement(final StatementRequest statementRequest) { + if (statementRequest instanceof QueryStatementRequest queryStatementRequest) { + final TableDefinition tableDefinition = queryStatementRequest.tableDefinition(); + final String qualifiedTableName = getQualifiedTableName(tableDefinition); + final Optional derivedTableFound = queryStatementRequest.derivedTable(); + + final String selectSql; + if (derivedTableFound.isPresent()) { + final String derivedTable = derivedTableFound.get(); + 
selectSql = "SELECT * FROM (%s) AS %s".formatted(derivedTable, qualifiedTableName); + } else { + final String tableColumns = getSelectTableColumns(tableDefinition.columns()); + selectSql = "SELECT %s FROM %s".formatted(tableColumns, qualifiedTableName); + } + + final StringBuilder sqlBuilder = new StringBuilder(selectSql); + + final Optional pageRequestFound = queryStatementRequest.pageRequest(); + + final Optional whereQueryClause = queryStatementRequest.queryClauses().stream() + .filter(queryClause -> QueryClauseType.WHERE == queryClause.queryClauseType()) + .findFirst(); + if (whereQueryClause.isPresent()) { + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(WHERE_KEYWORD); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(whereQueryClause.get().criteria()); + + // Add paging with index column specified + if (pageRequestFound.isPresent()) { + final PageRequest pageRequest = pageRequestFound.get(); + appendIndexedPageRequest(pageRequest, sqlBuilder); + } + } + + final Optional orderByQueryClause = queryStatementRequest.queryClauses().stream() + .filter(queryClause -> QueryClauseType.ORDER_BY == queryClause.queryClauseType()) + .findFirst(); + if (orderByQueryClause.isPresent()) { + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(ORDER_BY_KEYWORD); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(orderByQueryClause.get().criteria()); + } + + // Add paging without index column specified + if (pageRequestFound.isPresent()) { + final PageRequest pageRequest = pageRequestFound.get(); + appendPageRequest(pageRequest, sqlBuilder); + } + + return new StandardStatementResponse(sqlBuilder.toString()); + } else { + throw new IllegalArgumentException("Query Statement Request not found [%s]".formatted(statementRequest.getClass())); + } + } + + private void appendPageRequest(final PageRequest pageRequest, final StringBuilder sqlBuilder) { + final Optional indexColumnNameFound = pageRequest.indexColumnName(); + if (indexColumnNameFound.isEmpty()) { + final OptionalLong limitFound = pageRequest.limit(); + if (limitFound.isPresent()) { + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(LIMIT_KEYWORD); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(limitFound.getAsLong()); + } + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(OFFSET_KEYWORD); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(pageRequest.offset()); + } + } + + private void appendIndexedPageRequest(final PageRequest pageRequest, final StringBuilder sqlBuilder) { + final Optional indexColumnNameFound = pageRequest.indexColumnName(); + if (indexColumnNameFound.isPresent()) { + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(AND_KEYWORD); + sqlBuilder.append(SPACE_SEPARATOR); + + final String indexColumnName = indexColumnNameFound.get(); + sqlBuilder.append(indexColumnName); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(GREATER_THAN_OR_EQUAL); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(pageRequest.offset()); + + final OptionalLong limitFound = pageRequest.limit(); + if (limitFound.isPresent()) { + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(AND_KEYWORD); + sqlBuilder.append(SPACE_SEPARATOR); + + sqlBuilder.append(indexColumnName); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(LESS_THAN); + sqlBuilder.append(SPACE_SEPARATOR); + sqlBuilder.append(limitFound.getAsLong()); + } + } + } + + private String getQualifiedTableName(final TableDefinition tableDefinition) { + final StringBuilder builder = new StringBuilder(); + + 
final Optional catalog = tableDefinition.catalog(); + if (catalog.isPresent()) { + builder.append(catalog.get()); + builder.append(PERIOD_SEPARATOR); + } + + final Optional schemaName = tableDefinition.schemaName(); + if (schemaName.isPresent()) { + builder.append(schemaName.get()); + builder.append(PERIOD_SEPARATOR); + } + + builder.append(tableDefinition.tableName()); + return builder.toString(); + } + + private String getJdbcTypeName(final ColumnDefinition columnDefinition) { + final int dataType = columnDefinition.dataType(); + final JDBCType jdbcType = JDBCType.valueOf(dataType); + return jdbcType.getName(); + } +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService new file mode 100644 index 000000000000..de64e2459062 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService @@ -0,0 +1,15 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +org.apache.nifi.database.dialect.service.StandardDatabaseDialectService diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/test/java/org/apache/nifi/database/dialect/service/StandardDatabaseDialectServiceTest.java b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/test/java/org/apache/nifi/database/dialect/service/StandardDatabaseDialectServiceTest.java new file mode 100644 index 000000000000..24fca086e4d2 --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/nifi-database-dialect-service/src/test/java/org/apache/nifi/database/dialect/service/StandardDatabaseDialectServiceTest.java @@ -0,0 +1,347 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.nifi.database.dialect.service; + +import org.apache.nifi.database.dialect.service.api.ColumnDefinition; +import org.apache.nifi.database.dialect.service.api.PageRequest; +import org.apache.nifi.database.dialect.service.api.QueryClause; +import org.apache.nifi.database.dialect.service.api.QueryClauseType; +import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition; +import org.apache.nifi.database.dialect.service.api.StandardPageRequest; +import org.apache.nifi.database.dialect.service.api.StandardQueryStatementRequest; +import org.apache.nifi.database.dialect.service.api.StandardStatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementRequest; +import org.apache.nifi.database.dialect.service.api.StatementResponse; +import org.apache.nifi.database.dialect.service.api.StatementType; +import org.apache.nifi.database.dialect.service.api.TableDefinition; +import org.apache.nifi.reporting.InitializationException; +import org.apache.nifi.util.NoOpProcessor; +import org.apache.nifi.util.TestRunner; +import org.apache.nifi.util.TestRunners; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.sql.Types; +import java.util.List; +import java.util.Optional; +import java.util.OptionalLong; +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class StandardDatabaseDialectServiceTest { + + private static final String SERVICE_ID = StandardDatabaseDialectService.class.getSimpleName(); + + private static final String CATALOG = "APACHE"; + + private static final String SCHEMA_NAME = "NIFI"; + + private static final String TABLE_NAME = "RECORD"; + + private static final String ID_COLUMN_NAME = "ID"; + + private static final String LABEL_COLUMN_NAME = "LABEL"; + + private static final String CREATED_COLUMN_NAME = "CREATED"; + + private static final String ZERO_EQUALS_ONE = "0 = 1"; + + private static final String LABEL_IS_NOT_NULL = "LABEL IS NOT NULL"; + + private static final long LIMIT = 100; + + private static final long OFFSET = 25; + + private TestRunner runner; + + private StandardDatabaseDialectService service; + + @BeforeEach + void setRunner() throws InitializationException { + runner = TestRunners.newTestRunner(NoOpProcessor.class); + service = new StandardDatabaseDialectService(); + runner.addControllerService(SERVICE_ID, service); + } + + @Test + void testEnableDisable() { + runner.assertValid(service); + runner.enableControllerService(service); + runner.disableControllerService(service); + } + + @Test + void testGetSupportedStatementTypes() { + final Set supportedStatementTypes = service.getSupportedStatementTypes(); + + final Set expectedStatementTypes = Set.of( + StatementType.ALTER, + StatementType.CREATE, + StatementType.SELECT + ); + + assertEquals(expectedStatementTypes, supportedStatementTypes); + } + + @Test + void testUpsertStatementTypeUnsupported() { + final TableDefinition tableDefinition = 
getAlterTableDefinition(ColumnDefinition.Nullable.UNKNOWN); + final StatementRequest statementRequest = new StandardStatementRequest(StatementType.UPSERT, tableDefinition); + + assertThrows(UnsupportedOperationException.class, () -> service.getStatement(statementRequest)); + } + + @Test + void testAlterStatementType() { + final TableDefinition tableDefinition = getAlterTableDefinition(ColumnDefinition.Nullable.UNKNOWN); + final StatementRequest statementRequest = new StandardStatementRequest(StatementType.ALTER, tableDefinition); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "ALTER TABLE %s ADD COLUMNS (%s TIMESTAMP NOT NULL, %s VARCHAR)".formatted(TABLE_NAME, CREATED_COLUMN_NAME, LABEL_COLUMN_NAME); + assertEquals(expected, sql); + } + + @Test + void testAlterStatementTypeNotNull() { + final TableDefinition tableDefinition = getAlterTableDefinition(ColumnDefinition.Nullable.NO); + final StatementRequest statementRequest = new StandardStatementRequest(StatementType.ALTER, tableDefinition); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "ALTER TABLE %s ADD COLUMNS (%s TIMESTAMP NOT NULL, %s VARCHAR NOT NULL)".formatted(TABLE_NAME, CREATED_COLUMN_NAME, LABEL_COLUMN_NAME); + assertEquals(expected, sql); + } + + @Test + void testCreateStatementType() { + final TableDefinition tableDefinition = getCreateTableDefinition(ColumnDefinition.Nullable.UNKNOWN); + final StatementRequest statementRequest = new StandardStatementRequest(StatementType.CREATE, tableDefinition); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "CREATE TABLE %s (%s INTEGER NOT NULL PRIMARY KEY, %s VARCHAR)".formatted(TABLE_NAME, ID_COLUMN_NAME, LABEL_COLUMN_NAME); + assertEquals(expected, sql); + } + + @Test + void testCreateStatementTypePrimaryKeyNotNull() { + final TableDefinition tableDefinition = getCreateTableDefinition(ColumnDefinition.Nullable.NO); + final StatementRequest statementRequest = new StandardStatementRequest(StatementType.CREATE, tableDefinition); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "CREATE TABLE %s (%s INTEGER NOT NULL PRIMARY KEY, %s VARCHAR NOT NULL)".formatted(TABLE_NAME, ID_COLUMN_NAME, LABEL_COLUMN_NAME); + assertEquals(expected, sql); + } + + @Test + void testSelectStatementType() { + final TableDefinition tableDefinition = getSelectTableDefinition(); + final StatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT %s, %s FROM %s".formatted(ID_COLUMN_NAME, LABEL_COLUMN_NAME, TABLE_NAME); + assertEquals(expected, sql); + } + + @Test + void testSelectStatementTypeColumnsUnspecified() { + final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), TABLE_NAME, List.of()); + final StatementRequest 
statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT * FROM %s".formatted(TABLE_NAME); + assertEquals(expected, sql); + } + + @Test + void testSelectStatementTypeCatalogSchemaName() { + final TableDefinition selectTableDefinition = getSelectTableDefinition(); + final TableDefinition tableDefinition = new TableDefinition(Optional.of(CATALOG), Optional.of(SCHEMA_NAME), TABLE_NAME, selectTableDefinition.columns()); + final StatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT %s, %s FROM %s.%s.%s".formatted(ID_COLUMN_NAME, LABEL_COLUMN_NAME, CATALOG, SCHEMA_NAME, TABLE_NAME); + assertEquals(expected, sql); + } + + @Test + void testSelectStatementTypeDerivedTable() { + final TableDefinition tableDefinition = getSelectTableDefinition(); + final String derivedTable = "SELECT 1 AS ID"; + final StatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition, Optional.of(derivedTable), List.of(), Optional.empty()); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT * FROM (%s) AS %s".formatted(derivedTable, TABLE_NAME); + assertEquals(expected, sql); + } + + @Test + void testSelectStatementTypePageRequestLimitOffset() { + final TableDefinition tableDefinition = getSelectTableDefinition(); + + final PageRequest pageRequest = new StandardPageRequest(OFFSET, OptionalLong.of(LIMIT), Optional.empty()); + + final StatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition, Optional.empty(), List.of(), Optional.of(pageRequest)); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT %s, %s FROM %s LIMIT %d OFFSET %d".formatted(ID_COLUMN_NAME, LABEL_COLUMN_NAME, TABLE_NAME, LIMIT, OFFSET); + assertEquals(expected, sql); + } + + @Test + void testSelectStatementTypePageRequestIndexColumnName() { + final TableDefinition tableDefinition = getSelectTableDefinition(); + + final PageRequest pageRequest = new StandardPageRequest(OFFSET, OptionalLong.of(LIMIT), Optional.of(ID_COLUMN_NAME)); + final List queryClauses = List.of(new QueryClause(QueryClauseType.WHERE, LABEL_IS_NOT_NULL)); + + final StatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition, Optional.empty(), queryClauses, Optional.of(pageRequest)); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT %s, %s FROM %s WHERE %s AND %s >= %d AND %s < %d".formatted( + ID_COLUMN_NAME, LABEL_COLUMN_NAME, TABLE_NAME, LABEL_IS_NOT_NULL, ID_COLUMN_NAME, OFFSET, ID_COLUMN_NAME, LIMIT + ); + assertEquals(expected, sql); + } + + @Test + void 
testSelectStatementTypeWhereClause() { + final TableDefinition tableDefinition = getSelectTableDefinition(); + + final List queryClauses = List.of(new QueryClause(QueryClauseType.WHERE, ZERO_EQUALS_ONE)); + final StatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition, Optional.empty(), queryClauses, Optional.empty()); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT %s, %s FROM %s WHERE %s".formatted(ID_COLUMN_NAME, LABEL_COLUMN_NAME, TABLE_NAME, ZERO_EQUALS_ONE); + assertEquals(expected, sql); + } + + @Test + void testSelectStatementTypeWhereClauseOrderByClause() { + final TableDefinition tableDefinition = getSelectTableDefinition(); + + final List queryClauses = List.of( + new QueryClause(QueryClauseType.WHERE, ZERO_EQUALS_ONE), + new QueryClause(QueryClauseType.ORDER_BY, LABEL_COLUMN_NAME) + ); + final StatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition, Optional.empty(), queryClauses, Optional.empty()); + + final StatementResponse statementResponse = service.getStatement(statementRequest); + + assertNotNull(statementResponse); + + final String sql = statementResponse.sql(); + final String expected = "SELECT %s, %s FROM %s WHERE %s ORDER BY %s".formatted(ID_COLUMN_NAME, LABEL_COLUMN_NAME, TABLE_NAME, ZERO_EQUALS_ONE, LABEL_COLUMN_NAME); + assertEquals(expected, sql); + } + + private TableDefinition getSelectTableDefinition() { + final List columnDefinitions = List.of( + new StandardColumnDefinition(ID_COLUMN_NAME), + new StandardColumnDefinition(LABEL_COLUMN_NAME) + ); + + return new TableDefinition(Optional.empty(), Optional.empty(), TABLE_NAME, columnDefinitions); + } + + private TableDefinition getAlterTableDefinition(final ColumnDefinition.Nullable nullable) { + final List columnDefinitions = List.of( + new StandardColumnDefinition( + CREATED_COLUMN_NAME, + Types.TIMESTAMP, + ColumnDefinition.Nullable.NO, + Optional.empty(), + false + ), + new StandardColumnDefinition( + LABEL_COLUMN_NAME, + Types.VARCHAR, + nullable, + Optional.empty(), + false + ) + ); + + return new TableDefinition(Optional.empty(), Optional.empty(), TABLE_NAME, columnDefinitions); + } + + private TableDefinition getCreateTableDefinition(final ColumnDefinition.Nullable nullable) { + final List columnDefinitions = List.of( + new StandardColumnDefinition( + ID_COLUMN_NAME, + Types.INTEGER, + ColumnDefinition.Nullable.NO, + Optional.empty(), + true + ), + new StandardColumnDefinition( + LABEL_COLUMN_NAME, + Types.VARCHAR, + nullable, + Optional.empty(), + false + ) + ); + + return new TableDefinition(Optional.empty(), Optional.empty(), TABLE_NAME, columnDefinitions); + } +} diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/pom.xml b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/pom.xml new file mode 100644 index 000000000000..8030e5e6254a --- /dev/null +++ b/nifi-extension-bundles/nifi-standard-services/nifi-database-dialect-service-bundle/pom.xml @@ -0,0 +1,29 @@ + + + + 4.0.0 + + org.apache.nifi + nifi-standard-services + 2.2.0-SNAPSHOT + + nifi-database-dialect-service-bundle + pom + + nifi-database-dialect-service + nifi-database-dialect-service-nar + + diff --git a/nifi-extension-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml 
b/nifi-extension-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml index 9d3984866586..4dcdb3ef0deb 100644 --- a/nifi-extension-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml +++ b/nifi-extension-bundles/nifi-standard-services/nifi-standard-services-api-nar/pom.xml @@ -49,6 +49,11 @@ nifi-ssl-context-service-api compile + + org.apache.nifi + nifi-database-dialect-service-api + compile + org.apache.nifi nifi-distributed-cache-client-service-api diff --git a/nifi-extension-bundles/nifi-standard-services/pom.xml b/nifi-extension-bundles/nifi-standard-services/pom.xml index dd3491a85139..09e7a4dfd111 100644 --- a/nifi-extension-bundles/nifi-standard-services/pom.xml +++ b/nifi-extension-bundles/nifi-standard-services/pom.xml @@ -24,6 +24,8 @@ nifi-standard-services pom + nifi-database-dialect-service-bundle + nifi-database-dialect-service-api nifi-oauth2-provider-api nifi-oauth2-provider-bundle nifi-distributed-cache-client-service-api