fix: error around latest partition in BigQuery (#11274)
* fix: error around latest partition in BigQuery
* lint
* Switch to a backend-first approach
* fix test
* add an extra test
This commit is contained in:
parent
c360413fc2
commit
796a2a6924
|
|
@ -110,7 +110,7 @@ class TableElement extends React.PureComponent {
|
|||
/>
|
||||
);
|
||||
}
|
||||
let latest = Object.entries(table.partitions.latest).map(
|
||||
let latest = Object.entries(table.partitions?.latest || []).map(
|
||||
([key, value]) => `${key}=${value}`,
|
||||
);
|
||||
latest = latest.join('/');
|
||||
|
|
|
|||
|
|
@ -376,6 +376,18 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||
return type_code.upper()
|
||||
return None
|
||||
|
||||
@classmethod
def normalize_indexes(cls, indexes: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Normalize indexes for more consistency across db engines.

    The base implementation is intentionally a no-op: the raw index
    definitions reported by SQLAlchemy are handed back unchanged (same
    list object). Engine specs whose inspector output needs cleanup
    (e.g. BigQuery) override this hook.

    :param indexes: Raw indexes as returned by SQLAlchemy
    :return: cleaner, more aligned index definition
    """
    return indexes
|
||||
|
||||
@classmethod
|
||||
def extra_table_metadata(
|
||||
cls, database: "Database", table_name: str, schema_name: str
|
||||
|
|
|
|||
|
|
@ -129,6 +129,25 @@ class BigQueryEngineSpec(BaseEngineSpec):
|
|||
"""
|
||||
return "_" + hashlib.md5(label.encode("utf-8")).hexdigest()
|
||||
|
||||
@classmethod
def normalize_indexes(cls, indexes: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """
    Normalize indexes for more consistency across db engines.

    Works around a bug/behavior observed in pybigquery==0.4.15 where the
    partition pseudo-index is reported with ``column_names == [None]``:
    ``None`` entries are stripped, and any index left with no real
    columns is dropped entirely.

    The input list and its dicts are left untouched; cleaned copies are
    returned instead (the previous version mutated the caller's dicts
    in place).

    :param indexes: Raw indexes as returned by SQLAlchemy
    :return: cleaner, more aligned index definition
    """
    normalized_idxs = []
    for ix in indexes:
        column_names = [
            col for col in (ix.get("column_names") or []) if col is not None
        ]
        if column_names:
            # Shallow-copy the dict so the caller's data is not modified.
            normalized_idxs.append({**ix, "column_names": column_names})
    return normalized_idxs
|
||||
|
||||
@classmethod
|
||||
def extra_table_metadata(
|
||||
cls, database: "Database", table_name: str, schema_name: str
|
||||
|
|
|
|||
|
|
@ -619,7 +619,8 @@ class Database(
|
|||
def get_indexes(
    self, table_name: str, schema: Optional[str] = None
) -> List[Dict[str, Any]]:
    """
    Return the indexes of a table, normalized by the database's engine spec.

    :param table_name: Name of the table to inspect
    :param schema: Optional schema the table lives in
    :return: index definitions after engine-specific normalization
    """
    # Fetch raw inspector output and let the engine spec clean it up
    # (e.g. BigQuery strips None column names) in a single expression.
    return self.db_engine_spec.normalize_indexes(
        self.inspector.get_indexes(table_name, schema)
    )
|
||||
|
||||
def get_pk_constraint(
|
||||
self, table_name: str, schema: Optional[str] = None
|
||||
|
|
|
|||
|
|
@ -122,6 +122,27 @@ class TestBigQueryDbEngineSpec(TestDbEngineSpec):
|
|||
)
|
||||
self.assertEqual(result, expected_result)
|
||||
|
||||
def test_normalize_indexes(self):
    """
    DB Eng Specs (bigquery): Test index normalization
    """
    # NOTE: the original docstring said "Test extra table metadata",
    # copy-pasted from a sibling test; fixed to describe this test.

    # An index whose only column is None is dropped entirely.
    indexes = [{"name": "partition", "column_names": [None], "unique": False}]
    normalized_idx = BigQueryEngineSpec.normalize_indexes(indexes)
    self.assertEqual(normalized_idx, [])

    # A fully valid index passes through unchanged.
    indexes = [{"name": "partition", "column_names": ["dttm"], "unique": False}]
    normalized_idx = BigQueryEngineSpec.normalize_indexes(indexes)
    self.assertEqual(normalized_idx, indexes)

    # None entries are stripped while real columns are kept.
    indexes = [
        {"name": "partition", "column_names": ["dttm", None], "unique": False}
    ]
    normalized_idx = BigQueryEngineSpec.normalize_indexes(indexes)
    self.assertEqual(
        normalized_idx,
        [{"name": "partition", "column_names": ["dttm"], "unique": False}],
    )
|
||||
|
||||
def test_df_to_sql(self):
|
||||
"""
|
||||
DB Eng Specs (bigquery): Test DataFrame to SQL contract
|
||||
|
|
|
|||
Loading…
Reference in New Issue