Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
import org.apache.flink.table.catalog.ResolvedCatalogTable;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.SchemaTranslator;
import org.apache.flink.table.catalog.TableWritePrivilege;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.expressions.ApiExpressionUtils;
import org.apache.flink.table.expressions.Expression;
Expand All @@ -60,11 +61,14 @@
import org.apache.flink.table.operations.utils.OperationExpressionsUtils.CategorizedExpressions;
import org.apache.flink.table.operations.utils.OperationTreeBuilder;

import org.apache.flink.shaded.guava33.com.google.common.collect.Sets;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
Expand Down Expand Up @@ -428,8 +432,9 @@ public TablePipeline insertInto(String tablePath, boolean overwrite) {
tableEnvironment.getParser().parseIdentifier(tablePath);
ObjectIdentifier objectIdentifier =
tableEnvironment.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
Set<TableWritePrivilege> privileges = Sets.newHashSet(TableWritePrivilege.INSERT);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this needs to have junit tests added to drive the new code paths

ContextResolvedTable contextResolvedTable =
tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier);
tableEnvironment.getCatalogManager().getTableOrError(objectIdentifier, privileges);
return insertInto(contextResolvedTable, overwrite);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -687,7 +687,19 @@ public Optional<ContextResolvedTable> getTable(ObjectIdentifier objectIdentifier
resolveCatalogBaseTable(temporaryTable);
return Optional.of(ContextResolvedTable.temporary(objectIdentifier, resolvedTable));
} else {
return getPermanentTable(objectIdentifier, null);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this is an exciting addition; it seems big enough to warrant a FLIP.

Copy link
Author

@jerqi jerqi Jan 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This pull request includes all the required code and doesn't have many changes. In my opinion, a FLIP usually implies many code changes.
If you think we should start a discussion thread on the dev mailing list, I can start the discussion there.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@jerqi Thank you. I suggest starting a discussion on the dev list to see if this warrants a FLIP.

return getPermanentTable(objectIdentifier, null, null);
}
}

/**
 * Retrieves a fully qualified table with the given write privileges. If the path is not yet
 * fully qualified, use {@link #qualifyIdentifier(UnresolvedIdentifier)} first.
 *
 * @param objectIdentifier full path of the table to retrieve
 * @param privileges the write privileges required on the table
 * @return the table the path points to, or empty if it does not exist
 */
public Optional<ContextResolvedTable> getTable(
        ObjectIdentifier objectIdentifier, Set<TableWritePrivilege> privileges) {
    final CatalogBaseTable temporaryTable = temporaryTables.get(objectIdentifier);
    if (temporaryTable == null) {
        // Not a temporary table: look it up in the permanent catalogs, forwarding the
        // requested privileges so the catalog can take them into account.
        return getPermanentTable(objectIdentifier, null, privileges);
    }
    // Temporary tables take precedence; note the privileges are not consulted on this path.
    final ResolvedCatalogBaseTable<?> resolvedTable = resolveCatalogBaseTable(temporaryTable);
    return Optional.of(ContextResolvedTable.temporary(objectIdentifier, resolvedTable));
}

Expand All @@ -708,7 +720,7 @@ public Optional<ContextResolvedTable> getTable(
resolveCatalogBaseTable(temporaryTable);
return Optional.of(ContextResolvedTable.temporary(objectIdentifier, resolvedTable));
} else {
return getPermanentTable(objectIdentifier, timestamp);
return getPermanentTable(objectIdentifier, timestamp, null);
}
}

Expand Down Expand Up @@ -753,6 +765,17 @@ public ContextResolvedTable getTableOrError(ObjectIdentifier objectIdentifier) {
objectIdentifier, listCatalogs())));
}

/**
 * Like {@link #getTable(ObjectIdentifier, Set)}, but throws if the table cannot be found in
 * any catalog, nor as a temporary table.
 *
 * @param objectIdentifier full path of the table to retrieve
 * @param privileges the write privileges required on the table
 * @return the resolved table
 * @throws ValidationException if no permanent or temporary table exists for the identifier
 */
public ContextResolvedTable getTableOrError(
        ObjectIdentifier objectIdentifier, Set<TableWritePrivilege> privileges) {
    final Optional<ContextResolvedTable> table = getTable(objectIdentifier, privileges);
    return table.orElseThrow(
            () -> {
                final String message =
                        String.format(
                                "Cannot find table '%s' in any of the catalogs %s, nor as a temporary table.",
                                objectIdentifier, listCatalogs());
                return new ValidationException(message);
            });
}

/**
* Retrieves a partition with a fully qualified table path and partition spec. If the path is
* not yet fully qualified use{@link #qualifyIdentifier(UnresolvedIdentifier)} first.
Expand All @@ -777,7 +800,9 @@ public Optional<CatalogPartition> getPartition(
}

private Optional<ContextResolvedTable> getPermanentTable(
ObjectIdentifier objectIdentifier, @Nullable Long timestamp) {
ObjectIdentifier objectIdentifier,
@Nullable Long timestamp,
@Nullable Set<TableWritePrivilege> privileges) {
Optional<Catalog> catalogOptional = getCatalog(objectIdentifier.getCatalogName());
ObjectPath objectPath = objectIdentifier.toObjectPath();
if (catalogOptional.isPresent()) {
Expand All @@ -792,6 +817,8 @@ private Optional<ContextResolvedTable> getPermanentTable(
"%s is a view, but time travel is not supported for view.",
objectIdentifier.asSummaryString()));
}
} else if (privileges != null) {
table = currentCatalog.getTable(objectPath, privileges);
} else {
table = currentCatalog.getTable(objectPath);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;

import static org.apache.flink.util.Preconditions.checkNotNull;

Expand Down Expand Up @@ -282,6 +283,23 @@ default CatalogBaseTable getTable(ObjectPath tablePath, long timestamp)
"getTable(ObjectPath, long) is not implemented for %s.", this.getClass()));
}

/**
 * Returns a {@link CatalogTable} or {@link CatalogView} with specific write privileges
 * identified by the given {@link ObjectPath}. The framework will resolve the metadata objects
 * when necessary.
 *
 * <p>The default implementation ignores {@code writePrivileges} and simply delegates to
 * {@link #getTable(ObjectPath)}. Catalogs that enforce write authorization should override
 * this method and validate the requested privileges before returning the table.
 *
 * @param tablePath Path of the table or view
 * @param writePrivileges specific write privileges for the table
 * @return The requested table or view
 * @throws TableNotExistException if the target does not exist
 * @throws CatalogException in case of any runtime exception
 */
default CatalogBaseTable getTable(
        ObjectPath tablePath, Set<TableWritePrivilege> writePrivileges)
        throws TableNotExistException, CatalogException {
    return getTable(tablePath);
}

/**
* Check if a table or view exists in this catalog.
*
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.flink.table.catalog;

import org.apache.flink.annotation.PublicEvolving;

/**
 * The table writing privileges that will be provided when loading a table.
 *
 * <p>These values are passed to a catalog when a table is looked up for a write operation
 * (e.g. {@code INSERT}, {@code UPDATE}, {@code DELETE}), allowing the catalog to take the
 * intended write access into account when returning the table.
 */
@PublicEvolving
public enum TableWritePrivilege {
    /** The privilege of adding rows to the table. */
    INSERT,

    /** The privilege for changing existing rows in the table. */
    UPDATE,

    /** The privilege for deleting rows from the table. */
    DELETE
}
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.ContextResolvedTable;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.TableWritePrivilege;
import org.apache.flink.table.catalog.UnresolvedIdentifier;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;
import org.apache.flink.table.connector.sink.DynamicTableSink;
Expand Down Expand Up @@ -115,6 +116,8 @@
import org.apache.flink.table.planner.utils.OperationConverterUtils;
import org.apache.flink.table.planner.utils.RowLevelModificationContextUtils;

import org.apache.flink.shaded.guava33.com.google.common.collect.Sets;

import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.hint.HintStrategyTable;
import org.apache.calcite.rel.hint.RelHint;
Expand All @@ -134,6 +137,7 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

/**
Expand Down Expand Up @@ -323,9 +327,10 @@ private Operation convertSqlInsert(RichSqlInsert insert) {

UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(targetTablePath);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
Set<TableWritePrivilege> privileges = Sets.newHashSet(TableWritePrivilege.INSERT);
// If it is materialized table, convert it to catalog table for query optimize
ContextResolvedTable contextResolvedTable =
catalogManager.getTableOrError(identifier).toCatalogTable();
catalogManager.getTableOrError(identifier, privileges).toCatalogTable();

PlannerQueryOperation query =
(PlannerQueryOperation)
Expand Down Expand Up @@ -655,9 +660,10 @@ private Operation convertDelete(SqlDelete sqlDelete) {
LogicalTableModify tableModify = (LogicalTableModify) deleteRelational.rel;
UnresolvedIdentifier unresolvedTableIdentifier =
UnresolvedIdentifier.of(tableModify.getTable().getQualifiedName());
Set<TableWritePrivilege> privileges = Sets.newHashSet(TableWritePrivilege.DELETE);
ContextResolvedTable contextResolvedTable =
catalogManager.getTableOrError(
catalogManager.qualifyIdentifier(unresolvedTableIdentifier));
catalogManager.qualifyIdentifier(unresolvedTableIdentifier), privileges);
// try push down delete
Optional<DynamicTableSink> optionalDynamicTableSink =
DeletePushDownUtils.getDynamicTableSink(contextResolvedTable, tableModify);
Expand Down Expand Up @@ -700,9 +706,10 @@ private Operation convertUpdate(SqlUpdate sqlUpdate) {
LogicalTableModify tableModify = (LogicalTableModify) updateRelational.rel;
UnresolvedIdentifier unresolvedTableIdentifier =
UnresolvedIdentifier.of(tableModify.getTable().getQualifiedName());
Set<TableWritePrivilege> privileges = Sets.newHashSet(TableWritePrivilege.UPDATE);
ContextResolvedTable contextResolvedTable =
catalogManager.getTableOrError(
catalogManager.qualifyIdentifier(unresolvedTableIdentifier));
catalogManager.qualifyIdentifier(unresolvedTableIdentifier), privileges);
// get query
PlannerQueryOperation queryOperation =
new PlannerQueryOperation(
Expand Down