@@ -218,7 +218,7 @@ def _get_partitions(
218 218     connection: Connection,
219 219     table_name: str,
220 220     schema: str = None
221     - ) -> List[Dict[str, List[Any]]]:
    221 + ) -> List[str] | None:
222 222     schema = schema or self._get_default_schema_name(connection)
223 223     query = dedent(
224 224         f"""
@@ -227,6 +227,17 @@ def _get_partitions(
227 227     ).strip()
228 228     res = connection.execute(sql.text(query))
229 229     partition_names = [desc[0] for desc in res.cursor.description]
    230 +     data_types = [desc[1] for desc in res.cursor.description]
    231 +     # Compare the column names and types to the shape of an Iceberg $partitions table
    232 +     if (partition_names == ['partition', 'record_count', 'file_count', 'total_size', 'data']
    233 +             and data_types[0].startswith('row(')
    234 +             and data_types[1] == 'bigint'
    235 +             and data_types[2] == 'bigint'
    236 +             and data_types[3] == 'bigint'
    237 +             and data_types[4].startswith('row(')):
    238 +         # This is an Iceberg $partitions table - these match the partition metadata columns
    239 +         return None
    240 +     # This is a Hive table - these are the partition names
230 241     return partition_names
231 242 
232 243 def get_pk_constraint(self, connection: Connection, table_name: str, schema: str = None, **kw) -> Dict[str, Any]:
232243 def get_pk_constraint (self , connection : Connection , table_name : str , schema : str = None , ** kw ) -> Dict [str , Any ]:
@@ -326,7 +337,7 @@ def get_indexes(self, connection: Connection, table_name: str, schema: str = Non
326 337     try:
327 338         partitioned_columns = self._get_partitions(connection, f"{table_name}", schema)
328 339     except Exception as e:
329     -         # e.g. it's not a Hive table or an unpartitioned Hive table
    340 +         # e.g. it's an unpartitioned Hive table
330 341         logger.debug("Couldn't fetch partition columns. schema: %s, table: %s, error: %s", schema, table_name, e)
331 342     if not partitioned_columns:
332 343         return []
0 commit comments