Skip to content

Instantly share code, notes, and snippets.

@toddlipcon
Created May 22, 2019 04:28
Show Gist options
  • Save toddlipcon/380f9caf9dacfd70b080b5dcbe6620ff to your computer and use it in GitHub Desktop.
diff --git a/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java b/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java
index b5fda6280c..1a311955fb 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java
@@ -90,8 +90,6 @@ public abstract class AlterTableStmt extends StatementBase {
}
Preconditions.checkState(tableRef instanceof BaseTableRef);
table_ = tableRef.getTable();
- // Adding the check here instead of tableRef.analyze because tableRef is
- // used at multiple places and will even disallow select.
analyzer.ensureTableNotTransactional(table_);
if (table_ instanceof FeDataSourceTable
&& !(this instanceof AlterTableSetColumnStats)) {
diff --git a/fe/src/main/java/org/apache/impala/analysis/Analyzer.java b/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
index e03949ad41..4cf8efe94b 100644
--- a/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
+++ b/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
@@ -202,7 +202,7 @@ public class Analyzer {
*/
public static void ensureTableNotFullAcid(Map<String, String> tblProperties,
String tableName)
- throws AnalysisException {
+ throws AnalysisException {
if (AcidUtils.isFullAcidTable(tblProperties)) {
throw new AnalysisException(String.format(INSERT_ONLY_ACID_TABLE_SUPPORTED_ERROR_MSG,
tableName));
@@ -214,13 +214,13 @@ public class Analyzer {
* @throws AnalysisException If table is full acid table.
*/
public static void ensureTableNotFullAcid(FeTable table)
- throws AnalysisException {
+ throws AnalysisException {
ensureTableNotFullAcid(table.getMetaStoreTable().getParameters(),
table.getFullName());
}
public static void ensureTableNotTransactional(FeTable table)
- throws AnalysisException {
+ throws AnalysisException {
if (AcidUtils.isTransactionalTable(table.getMetaStoreTable().getParameters())) {
throw new AnalysisException(String.format(TRANSACTIONAL_TABLE_NOT_SUPPORTED,
table.getFullName()));
diff --git a/fe/src/main/java/org/apache/impala/util/AcidUtils.java b/fe/src/main/java/org/apache/impala/util/AcidUtils.java
index 184a1d8857..0e22c53a08 100644
--- a/fe/src/main/java/org/apache/impala/util/AcidUtils.java
+++ b/fe/src/main/java/org/apache/impala/util/AcidUtils.java
@@ -23,7 +23,7 @@ import java.util.Map;
/**
* Contains utility functions for working with Acid tables.
* <p>
- * The code is mostly copy pasted from Hive. Ideally we should use the
+ * The code is mostly copy pasted from Hive. Ideally we should use
* the code directly from Hive.
* </p>
*/
diff --git a/testdata/bin/generate-schema-statements.py b/testdata/bin/generate-schema-statements.py
index 54a5253eee..f4905ae31a 100755
--- a/testdata/bin/generate-schema-statements.py
+++ b/testdata/bin/generate-schema-statements.py
@@ -244,29 +244,32 @@ def build_create_statement(table_template, table_name, db_name, db_suffix,
hdfs_location=hdfs_location)
return create_stmt
+
def parse_table_properties(file_format, table_properties):
- # Read the properties specified in the TABLE_PROPERTIES section.
- # The table properties can be restricted to a file format or are applicable
- # for all formats.
- # For specific format the syntax is <fileformat>:<key>=<val>
+ """
+ Read the properties specified in the TABLE_PROPERTIES section.
+ The table properties can be restricted to a file format or are applicable
+ for all formats.
+ For specific format the syntax is <fileformat>:<key>=<val>
+ """
tblproperties = {}
TABLE_PROPERTY_RE = re.compile(
# Optional '<data-format>:' prefix, capturing just the 'data-format' part.
r'(?:(\w+):)?' +
# Required key=value, capturing the key and value
r'(.+?)=(.*)')
- if table_properties:
- for table_property in filter(None, table_properties.split("\n")):
- m = TABLE_PROPERTY_RE.match(table_property)
- if not m:
- raise Exception("Invalid table property line :{0}", format(table_property))
- only_format, key, val = m.groups()
- if only_format is not None and only_format != file_format:
- continue
- tblproperties[key] = val
+ for table_property in filter(None, table_properties.split("\n")):
+ m = TABLE_PROPERTY_RE.match(table_property)
+ if not m:
+ raise Exception("Invalid table property line: {0}".format(table_property))
+ only_format, key, val = m.groups()
+ if only_format is not None and only_format != file_format:
+ continue
+ tblproperties[key] = val
return tblproperties
+
def build_table_template(file_format, columns, partition_columns, row_format,
avro_schema_dir, table_name, tblproperties):
if file_format == 'hbase':
@@ -792,15 +795,17 @@ def generate_statements(output_name, test_vectors, sections,
hbase_post_load.write_to_file('post-load-' + output_name + '-hbase-generated.sql')
impala_invalidate.write_to_file("invalidate-" + output_name + "-impala-generated.sql")
+
def is_transactional(table_properties):
return table_properties.get('transactional', "").lower() == 'true'
+
def parse_schema_template_file(file_name):
VALID_SECTION_NAMES = ['DATASET', 'BASE_TABLE_NAME', 'COLUMNS', 'PARTITION_COLUMNS',
'ROW_FORMAT', 'CREATE', 'CREATE_HIVE', 'CREATE_KUDU',
'DEPENDENT_LOAD', 'DEPENDENT_LOAD_KUDU', 'DEPENDENT_LOAD_HIVE',
'LOAD', 'ALTER', 'HBASE_COLUMN_FAMILIES',
- 'TABLE_PROPERTIES', 'HBASE_REGION_SPLITS','HIVE_MAJOR_VERSION']
+ 'TABLE_PROPERTIES', 'HBASE_REGION_SPLITS', 'HIVE_MAJOR_VERSION']
return parse_test_file(file_name, VALID_SECTION_NAMES, skip_unknown_sections=False)
if __name__ == "__main__":
diff --git a/testdata/datasets/README b/testdata/datasets/README
index 8e013bae09..75de1b4c27 100644
--- a/testdata/datasets/README
+++ b/testdata/datasets/README
@@ -74,6 +74,7 @@ The schema template SQL files have the following format:
DEPENDENT_LOAD_HIVE
Statements to be executed during the "dependent load" phase. These statements
are run after the initial (base table) load is complete.
+
HIVE_MAJOR_VERSION
The required major version of Hive for this table. If the major version
of Hive at runtime does not exactly match the version specified in this section,
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment