Unverified Commit 804b20cf authored by Dorian Johnson, committed by GitHub

chore: mypy cleanup (convert last comment types, remove noqa imports) (#338)

Signed-off-by: Tao Feng <fengtao04@gmail.com>
parent b32fb653
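
The commit applies the same two mechanical changes across every file touched below: Python 2 style type comments become inline Python 3 annotations, and the accompanying `# noqa: F401` suppressions are dropped because the imported names are now referenced directly in the annotations, so flake8 no longer reports them as unused. A minimal sketch of the pattern, assuming illustrative names not taken from the diff:

from typing import Optional

# Before (Python 2 style): the annotation lives in comments, so flake8 treats the
# "Optional" import as unused and the import line needs "# noqa: F401".
#
# def find_owner(table_name, default=None):
#     # type: (str, Optional[str]) -> Optional[str]
#     last_owner = None  # type: Optional[str]
#     ...

# After (Python 3 style): inline annotations reference the imported name directly,
# so the noqa suppression can simply be removed.
def find_owner(table_name: str, default: Optional[str] = None) -> Optional[str]:
    last_owner: Optional[str] = default
    return last_owner

mypy accepts both forms; the inline form is simply what the remaining files in this commit are converted to.
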
......@@ -3,7 +3,7 @@
import abc
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
class Scoped(object, metaclass=abc.ABCMeta):
......
......@@ -4,7 +4,7 @@
import abc
import logging
from typing import List, Optional # noqa: F401
from typing import List, Optional
LOGGER = logging.getLogger(__name__)
......@@ -48,7 +48,7 @@ def notify_callbacks(callbacks: List[Callback], is_success: bool) -> None:
LOGGER.info('Notifying callbacks')
last_exception = None # type: Optional[Exception]
last_exception: Optional[Exception] = None
for callback in callbacks:
try:
if is_success:
......
......@@ -4,8 +4,8 @@
import logging
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -9,8 +9,8 @@ import google.oauth2.service_account
import google_auth_httplib2
from googleapiclient.discovery import build
import httplib2
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Dict, Iterator, List # noqa: F401
from pyhocon import ConfigTree
from typing import Any, Dict, Iterator, List
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,8 +3,8 @@
import abc
from pyhocon import ConfigTree # noqa: F401
from typing import Any # noqa: F401
from pyhocon import ConfigTree
from typing import Any
from databuilder import Scoped
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import cast, Any, Dict, List, Set # noqa: F401
from pyhocon import ConfigTree
from typing import cast, Any, Dict, List, Set
from databuilder.extractor.base_bigquery_extractor import BaseBigQueryExtractor, DatasetRef
from databuilder.models.table_metadata import TableMetadata, ColumnMetadata
......
......@@ -7,8 +7,8 @@ import logging
import re
from time import sleep
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Iterator, Dict, Optional, Tuple # noqa: F401
from pyhocon import ConfigTree
from typing import Any, Iterator, Dict, Optional, Tuple
from databuilder.extractor.base_bigquery_extractor import BaseBigQueryExtractor
......
......@@ -7,8 +7,8 @@ import logging
import datetime
import textwrap
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Dict, Iterator, List, Tuple, Union # noqa: F401
from pyhocon import ConfigTree
from typing import Any, Dict, Iterator, List, Tuple, Union
from databuilder.extractor.base_bigquery_extractor import BaseBigQueryExtractor, DatasetRef
from databuilder.models.watermark import Watermark
......
......@@ -4,8 +4,8 @@
from cassandra.cluster import Cluster
import cassandra.metadata
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any, List # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict
from databuilder.extractor.base_extractor import Extractor
from databuilder.models.table_metadata import TableMetadata, ColumnMetadata
......
......@@ -5,8 +5,8 @@ import csv
import importlib
from collections import defaultdict
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Iterator # noqa: F401
from pyhocon import ConfigTree
from typing import Any
from databuilder.extractor.base_extractor import Extractor
from databuilder.models.table_metadata import TableMetadata, ColumnMetadata
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from typing import Any, List # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from typing import Any, List
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from typing import Any, List # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from typing import Any, List
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,14 +3,14 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from typing import Any, List # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from typing import Any, List
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_utils import ModeDashboardUtils
from databuilder.rest_api.mode_analytics.mode_paginated_rest_api_query import ModePaginatedRestApiQuery
from databuilder.rest_api.rest_api_query import RestApiQuery # noqa: F401
from databuilder.rest_api.rest_api_query import RestApiQuery
from databuilder.transformer.base_transformer import ChainedTransformer, Transformer
from databuilder.transformer.dict_to_model import DictToModel, MODEL_CLASS
from databuilder.transformer.template_variable_substitution_transformer import \
......
......@@ -3,14 +3,14 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_executions_extractor import \
ModeDashboardExecutionsExtractor
from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_utils import ModeDashboardUtils
from databuilder.extractor.restapi.rest_api_extractor import STATIC_RECORD_DICT
from databuilder.rest_api.mode_analytics.mode_paginated_rest_api_query import ModePaginatedRestApiQuery
from databuilder.rest_api.rest_api_query import RestApiQuery # noqa: F401
from databuilder.rest_api.rest_api_query import RestApiQuery
from databuilder.transformer.dict_to_model import DictToModel, MODEL_CLASS
from databuilder.transformer.timestamp_string_to_epoch import TimestampStringToEpoch, FIELD_NAME
......
......@@ -3,7 +3,7 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_executions_extractor import \
ModeDashboardExecutionsExtractor
......@@ -11,7 +11,7 @@ from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_utils import
from databuilder.extractor.restapi.rest_api_extractor import STATIC_RECORD_DICT
from databuilder.models.dashboard.dashboard_execution import DashboardExecution
from databuilder.rest_api.mode_analytics.mode_paginated_rest_api_query import ModePaginatedRestApiQuery
from databuilder.rest_api.rest_api_query import RestApiQuery # noqa: F401
from databuilder.rest_api.rest_api_query import RestApiQuery
LOGGER = logging.getLogger(__name__)
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from typing import Any # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from typing import Any
from databuilder.extractor.base_extractor import Extractor
from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_utils import ModeDashboardUtils
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from typing import Any, List # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from typing import Any, List
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,13 +3,13 @@
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import Any # noqa: F401
from pyhocon import ConfigTree
from typing import Any
from databuilder.extractor.base_extractor import Extractor
from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_utils import ModeDashboardUtils
from databuilder.rest_api.mode_analytics.mode_paginated_rest_api_query import ModePaginatedRestApiQuery
from databuilder.rest_api.rest_api_query import RestApiQuery # noqa: F401
from databuilder.rest_api.rest_api_query import RestApiQuery
LOGGER = logging.getLogger(__name__)
......
......@@ -3,9 +3,9 @@
import logging
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from requests.auth import HTTPBasicAuth
from typing import Any, List # noqa: F401
from typing import Any, List
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from requests.auth import HTTPBasicAuth
from typing import Any, Dict
......@@ -9,9 +9,9 @@ from databuilder import Scoped
from databuilder.extractor.dashboard.mode_analytics.mode_dashboard_constants import ORGANIZATION, MODE_ACCESS_TOKEN, \
MODE_PASSWORD_TOKEN
from databuilder.extractor.restapi.rest_api_extractor import RestAPIExtractor, REST_API_QUERY, STATIC_RECORD_DICT
from databuilder.rest_api.base_rest_api_query import BaseRestApiQuery # noqa: F401
from databuilder.rest_api.base_rest_api_query import BaseRestApiQuery
from databuilder.rest_api.base_rest_api_query import RestApiQuerySeed
from databuilder.rest_api.rest_api_query import RestApiQuery # noqa: F401
from databuilder.rest_api.rest_api_query import RestApiQuery
class ModeDashboardUtils(object):
......
......@@ -4,8 +4,8 @@
import logging
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,9 +3,9 @@
import importlib
import logging
from typing import Iterable, Any # noqa: F401
from typing import Iterable, Any
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder.extractor.base_extractor import Extractor
......
......@@ -5,8 +5,8 @@ import logging
from collections import namedtuple
import textwrap
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -2,9 +2,9 @@
# SPDX-License-Identifier: Apache-2.0
import importlib
from typing import Iterable, Any # noqa: F401
from typing import Iterable, Any
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,8 +3,8 @@
import boto3
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any, List # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any, List
from databuilder.extractor.base_extractor import Extractor
from databuilder.models.table_metadata import TableMetadata, ColumnMetadata
......
......@@ -7,9 +7,9 @@ from datetime import datetime
from functools import wraps
from multiprocessing.pool import ThreadPool
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from pytz import UTC
from typing import Iterator, Union, Any, List # noqa: F401
from typing import Iterator, Union, Any, List
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -4,8 +4,8 @@
import logging
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -4,8 +4,8 @@
import logging
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -4,8 +4,8 @@
import logging
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,9 +3,9 @@
import importlib
import time
from typing import Iterable, Any # noqa: F401
from typing import Any
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder.extractor.generic_extractor import GenericExtractor
......
......@@ -3,9 +3,9 @@
import importlib
import logging
from typing import Any, Iterator, Union # noqa: F401
from typing import Any, Iterator, Union
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from neo4j import GraphDatabase
import neo4j
......
......@@ -2,9 +2,9 @@
# SPDX-License-Identifier: Apache-2.0
import textwrap
from typing import Any # noqa: F401
from typing import Any
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -4,8 +4,8 @@
import logging
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -5,8 +5,8 @@ import base64
import json
import logging
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any, List # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, List, Union
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,12 +3,12 @@
import logging
import importlib
from typing import Any, Iterator, Dict, Optional # noqa: F401
from typing import Any, Iterator, Dict, Optional
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder.extractor.base_extractor import Extractor
from databuilder.rest_api.base_rest_api_query import BaseRestApiQuery # noqa: F401
from databuilder.rest_api.base_rest_api_query import BaseRestApiQuery
REST_API_QUERY = 'restapi_query'
......
......@@ -5,8 +5,8 @@
import logging
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Iterator, Union, Dict, Any # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Iterator, Union, Dict, Any
from unidecode import unidecode
from databuilder import Scoped
......
......@@ -4,8 +4,8 @@
import importlib
from sqlalchemy import create_engine
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Iterator # noqa: F401
from pyhocon import ConfigTree
from typing import Any
from databuilder.extractor.base_extractor import Extractor
......
......@@ -3,9 +3,9 @@
import logging
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from retrying import retry
from typing import List # noqa: F401
from typing import List
from databuilder import Scoped
from databuilder.filesystem.metadata import FileMetadata
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from datetime import datetime # noqa: F401
from datetime import datetime
class FileMetadata(object):
......
......@@ -3,7 +3,7 @@
import abc
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder import Scoped
from databuilder.utils.closer import Closer
......
......@@ -3,14 +3,14 @@
import logging
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from statsd import StatsClient
from databuilder import Scoped
from databuilder.job.base_job import Job
from databuilder.publisher.base_publisher import NoopPublisher
from databuilder.publisher.base_publisher import Publisher # noqa: F401
from databuilder.task.base_task import Task # noqa: F401
from databuilder.publisher.base_publisher import Publisher
from databuilder.task.base_task import Task
LOGGER = logging.getLogger(__name__)
......
......@@ -3,10 +3,10 @@
import abc
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder import Scoped
from typing import Any # noqa: F401
from typing import Any
class Loader(Scoped):
......
......@@ -4,8 +4,8 @@
import csv
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import Any # noqa: F401
from pyhocon import ConfigTree
from typing import Any
from databuilder.loader.base_loader import Loader
......
......@@ -3,7 +3,7 @@
import os
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder.loader.base_loader import Loader
from databuilder.models.elasticsearch_document import ElasticsearchDocument
......
......@@ -5,16 +5,16 @@ import csv
import logging
import os
import shutil
from csv import DictWriter # noqa: F401
from csv import DictWriter
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from typing import Dict, Any # noqa: F401
from pyhocon import ConfigTree, ConfigFactory
from typing import Dict, Any
from databuilder.job.base_job import Job
from databuilder.loader.base_loader import Loader
from databuilder.models.neo4j_csv_serde import NODE_LABEL, \
RELATION_START_LABEL, RELATION_END_LABEL, RELATION_TYPE
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable # noqa: F401
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable
from databuilder.utils.closer import Closer
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import Optional, Any # noqa: F401
from pyhocon import ConfigTree
from typing import Optional, Any
from databuilder.loader.base_loader import Loader
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Union
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
NODE_LABEL, RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Union, Dict, Any, Iterable, List # noqa: F401
from typing import Union, Dict, Any, Iterable, List
from databuilder.models.neo4j_csv_serde import (
Neo4jCsvSerializable, RELATION_START_KEY, RELATION_END_KEY,
......
......@@ -3,7 +3,7 @@
import logging
from typing import Optional, Dict, Any, Union, Iterator # noqa: F401
from typing import Optional, Dict, Any, Union, Iterator
from databuilder.models.dashboard.dashboard_query import DashboardQuery
from databuilder.models.neo4j_csv_serde import (
......
......@@ -3,7 +3,7 @@
import logging
from typing import Optional, Dict, Any, Union, Iterator # noqa: F401
from typing import Optional, Dict, Any, Union, Iterator
from databuilder.models.dashboard.dashboard_metadata import DashboardMetadata
from databuilder.models.neo4j_csv_serde import (
......
......@@ -3,7 +3,7 @@
import logging
from typing import Optional, Dict, Any, Union, Iterator # noqa: F401
from typing import Optional, Dict, Any, Union, Iterator
from databuilder.models.dashboard.dashboard_metadata import DashboardMetadata
from databuilder.models.neo4j_csv_serde import (
......
......@@ -3,7 +3,7 @@
from collections import namedtuple
from typing import Any, Dict, Iterator, List, Optional, Set, Union # noqa: F401
from typing import Any, Dict, Iterator, List, Optional, Set, Union
from databuilder.models.cluster import cluster_constants
from databuilder.models.neo4j_csv_serde import (
......
......@@ -3,7 +3,7 @@
import logging
from typing import Optional, Dict, Any, Union, Iterator # noqa: F401
from typing import Optional, Dict, Any, Union, Iterator
from databuilder.models.dashboard.dashboard_metadata import DashboardMetadata
from databuilder.models.neo4j_csv_serde import (
......
......@@ -3,7 +3,7 @@
import logging
from typing import Optional, Dict, Any, Union, Iterator # noqa: F401
from typing import Optional, Dict, Any, Union, Iterator
from databuilder.models.dashboard.dashboard_metadata import DashboardMetadata
from databuilder.models.neo4j_csv_serde import (
......
......@@ -4,7 +4,7 @@
import logging
import re
from typing import Optional, Dict, Any, List, Union, Iterator # noqa: F401
from typing import Optional, Dict, Any, List, Union, Iterator
from databuilder.models.dashboard.dashboard_metadata import DashboardMetadata
from databuilder.models.neo4j_csv_serde import (
......
......@@ -3,7 +3,7 @@
import logging
from typing import Optional, Dict, Any, Union, Iterator # noqa: F401
from typing import Optional, Dict, Any, Union, Iterator
from databuilder.models.dashboard.dashboard_metadata import DashboardMetadata
from databuilder.models.neo4j_csv_serde import (
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import List, Optional, Union # noqa: F401
from typing import List, Optional, Union
from databuilder.models.elasticsearch_document import ElasticsearchDocument
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import List, Optional # noqa: F401
from typing import List
from databuilder.models.elasticsearch_document import ElasticsearchDocument
......
......@@ -3,7 +3,7 @@
from collections import namedtuple
from typing import Iterable, Any, Union, Iterator, Dict, Set, List # noqa: F401
from typing import Any, Iterator, Dict, List, Set, Union
# TODO: We could separate TagMetadata from table_metadata to own module
from databuilder.models.table_metadata import TagMetadata
......
......@@ -3,7 +3,7 @@
import abc
from typing import Dict, Set, Any, Union # noqa: F401
from typing import Dict, Set, Any, Union
NODE_KEY = 'KEY'
NODE_LABEL = 'LABEL'
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Union
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, NODE_LABEL
class Neo4jESLastUpdated(Neo4jCsvSerializable):
# type: (...) -> None
"""
Data model to keep track the last updated timestamp for
neo4j and es.
......@@ -18,9 +17,8 @@ class Neo4jESLastUpdated(Neo4jCsvSerializable):
LATEST_TIMESTAMP = 'latest_timestmap'
def __init__(self,
timestamp, # type: int
):
# type: (...) -> None
timestamp: int,
) -> None:
"""
:param timestamp: epoch for latest updated timestamp for neo4j an es
"""
......@@ -28,8 +26,7 @@ class Neo4jESLastUpdated(Neo4jCsvSerializable):
self._node_iter = iter(self.create_nodes())
self._rel_iter = iter(self.create_relation())
def create_next_node(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_node(self) -> Union[Dict[str, Any], None]:
"""
Will create an orphan node for last updated timestamp.
:return:
......@@ -39,8 +36,7 @@ class Neo4jESLastUpdated(Neo4jCsvSerializable):
except StopIteration:
return None
def create_nodes(self):
# type: () -> List[Dict[str, Any]]
def create_nodes(self) -> List[Dict[str, Any]]:
"""
Create a list of Neo4j node records.
:return:
......@@ -51,16 +47,11 @@ class Neo4jESLastUpdated(Neo4jCsvSerializable):
Neo4jESLastUpdated.LATEST_TIMESTAMP: self.timestamp
}]
def create_next_relation(self):
# type: () -> Union[Dict[str, Any], None]
"""
:return:
"""
def create_next_relation(self) -> Union[Dict[str, Any], None]:
try:
return next(self._rel_iter)
except StopIteration:
return None
def create_relation(self):
# type: () -> List[Dict[str, Any]]
def create_relation(self) -> List[Dict[str, Any]]:
return []
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Dict, Any, Union, Iterator # noqa: F401
from typing import Dict, Any, Union, Iterator
from databuilder.models.neo4j_csv_serde import (
Neo4jCsvSerializable, NODE_LABEL, NODE_KEY)
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Iterable, Union, Dict, Any, Iterator # noqa: F401
from typing import Iterable, Union, Dict, Any, Iterator
from databuilder.models.neo4j_csv_serde import (
Neo4jCsvSerializable, RELATION_START_KEY, RELATION_END_KEY,
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import List, Optional # noqa: F401
from typing import List, Optional
from databuilder.models.elasticsearch_document import ElasticsearchDocument
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Union
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
NODE_LABEL, RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......
......@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
import re
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Union
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, \
RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......
......@@ -4,7 +4,7 @@
import copy
from collections import namedtuple
from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Union # noqa: F401
from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Union
from databuilder.models.cluster import cluster_constants
from databuilder.models.neo4j_csv_serde import (
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Optional, Union
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
NODE_LABEL, RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......@@ -11,7 +11,6 @@ from databuilder.models.user import User
class TableOwner(Neo4jCsvSerializable):
# type: (...) -> None
"""
Hive table owner model.
"""
......@@ -19,13 +18,12 @@ class TableOwner(Neo4jCsvSerializable):
TABLE_OWNER_RELATION_TYPE = OWNER_RELATION_TYPE
def __init__(self,
db_name, # type: str
schema, # type: str
table_name, # type: str
owners, # type: Union[List, str]
cluster='gold', # type: str
):
# type: (...) -> None
db_name: str,
schema: str,
table_name: str,
owners: Union[List, str],
cluster: str = 'gold',
) -> None:
self.db = db_name.lower()
self.schema = schema.lower()
self.table = table_name.lower()
......@@ -37,35 +35,29 @@ class TableOwner(Neo4jCsvSerializable):
self._node_iter = iter(self.create_nodes())
self._relation_iter = iter(self.create_relation())
def create_next_node(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_node(self) -> Optional[Dict[str, Any]]:
# return the string representation of the data
try:
return next(self._node_iter)
except StopIteration:
return None
def create_next_relation(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_relation(self) -> Optional[Dict[str, Any]]:
try:
return next(self._relation_iter)
except StopIteration:
return None
def get_owner_model_key(self, owner # type: str
):
# type: (...) -> str
def get_owner_model_key(self, owner: str) -> str:
return User.USER_NODE_KEY_FORMAT.format(email=owner)
def get_metadata_model_key(self):
# type: (...) -> str
def get_metadata_model_key(self) -> str:
return '{db}://{cluster}.{schema}/{table}'.format(db=self.db,
cluster=self.cluster,
schema=self.schema,
table=self.table)
def create_nodes(self):
# type: () -> List[Dict[str, Any]]
def create_nodes(self) -> List[Dict[str, Any]]:
"""
Create a list of Neo4j node records
:return:
......@@ -80,8 +72,7 @@ class TableOwner(Neo4jCsvSerializable):
})
return results
def create_relation(self):
# type: () -> List[Dict[str, Any]]
def create_relation(self) -> List[Dict[str, Any]]:
"""
Create a list of relation map between owner record with original hive table
:return:
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Optional
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
NODE_LABEL, RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......@@ -11,7 +11,6 @@ from databuilder.models.table_metadata import TableMetadata
class TableSource(Neo4jCsvSerializable):
# type: (...) -> None
"""
Hive table source model.
"""
......@@ -21,14 +20,13 @@ class TableSource(Neo4jCsvSerializable):
TABLE_SOURCE_RELATION_TYPE = 'SOURCE'
def __init__(self,
db_name, # type: str
schema, # type: str
table_name, # type: str
cluster, # type: str
source, # type: str
source_type='github', # type: str
):
# type: (...) -> None
db_name: str,
schema: str,
table_name: str,
cluster: str,
source: str,
source_type: str = 'github',
) -> None:
self.db = db_name.lower()
self.schema = schema.lower()
self.table = table_name.lower()
......@@ -40,37 +38,32 @@ class TableSource(Neo4jCsvSerializable):
self._node_iter = iter(self.create_nodes())
self._relation_iter = iter(self.create_relation())
def create_next_node(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_node(self) -> Optional[Dict[str, Any]]:
# return the string representation of the data
try:
return next(self._node_iter)
except StopIteration:
return None
def create_next_relation(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_relation(self) -> Optional[Dict[str, Any]]:
try:
return next(self._relation_iter)
except StopIteration:
return None
def get_source_model_key(self):
# type: (...) -> str
def get_source_model_key(self) -> str:
return TableSource.KEY_FORMAT.format(db=self.db,
cluster=self.cluster,
schema=self.schema,
tbl=self.table)
def get_metadata_model_key(self):
# type: (...) -> str
def get_metadata_model_key(self) -> str:
return '{db}://{cluster}.{schema}/{table}'.format(db=self.db,
cluster=self.cluster,
schema=self.schema,
table=self.table)
def create_nodes(self):
# type: () -> List[Dict[str, Any]]
def create_nodes(self) -> List[Dict[str, Any]]:
"""
Create a list of Neo4j node records
:return:
......@@ -83,8 +76,7 @@ class TableSource(Neo4jCsvSerializable):
}]
return results
def create_relation(self):
# type: () -> List[Dict[str, Any]]
def create_relation(self) -> List[Dict[str, Any]]:
"""
Create a list of relation map between owner record with original hive table
:return:
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Optional
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
NODE_LABEL, RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......@@ -10,7 +10,6 @@ from databuilder.models.table_metadata import ColumnMetadata
class TableColumnStats(Neo4jCsvSerializable):
# type: (...) -> None
"""
Hive table stats model.
Each instance represents one row of hive watermark result.
......@@ -22,17 +21,16 @@ class TableColumnStats(Neo4jCsvSerializable):
Column_STAT_RELATION_TYPE = 'STAT'
def __init__(self,
table_name, # type: str
col_name, # type: str
stat_name, # type: str
stat_val, # type: str
start_epoch, # type: str
end_epoch, # type: str
db='hive', # type: str
cluster='gold', # type: str
schema=None # type: str
):
# type: (...) -> None
table_name: str,
col_name: str,
stat_name: str,
stat_val: str,
start_epoch: str,
end_epoch: str,
db: str = 'hive',
cluster: str = 'gold',
schema: str = None
) -> None:
if schema is None:
self.schema, self.table = table_name.split('.')
else:
......@@ -48,23 +46,20 @@ class TableColumnStats(Neo4jCsvSerializable):
self._node_iter = iter(self.create_nodes())
self._relation_iter = iter(self.create_relation())
def create_next_node(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_node(self) -> Optional[Dict[str, Any]]:
# return the string representation of the data
try:
return next(self._node_iter)
except StopIteration:
return None
def create_next_relation(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_relation(self) -> Optional[Dict[str, Any]]:
try:
return next(self._relation_iter)
except StopIteration:
return None
def get_table_stat_model_key(self):
# type: (...) -> str
def get_table_stat_model_key(self) -> str:
return TableColumnStats.KEY_FORMAT.format(db=self.db,
cluster=self.cluster,
schema=self.schema,
......@@ -72,8 +67,7 @@ class TableColumnStats(Neo4jCsvSerializable):
col=self.col_name,
stat_name=self.stat_name)
def get_col_key(self):
# type: (...) -> str
def get_col_key(self) -> str:
# no cluster, schema info from the input
return ColumnMetadata.COLUMN_KEY_FORMAT.format(db=self.db,
cluster=self.cluster,
......@@ -81,8 +75,7 @@ class TableColumnStats(Neo4jCsvSerializable):
tbl=self.table,
col=self.col_name)
def create_nodes(self):
# type: () -> List[Dict[str, Any]]
def create_nodes(self) -> List[Dict[str, Any]]:
"""
Create a list of Neo4j node records
:return:
......@@ -97,8 +90,7 @@ class TableColumnStats(Neo4jCsvSerializable):
}]
return results
def create_relation(self):
# type: () -> List[Dict[str, Any]]
def create_relation(self) -> List[Dict[str, Any]]:
"""
Create a list of relation map between table stat record with original hive table
:return:
......
......@@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
import copy
from typing import Any, List, Dict, Optional # noqa: F401
from typing import Any, List, Dict, Optional
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
NODE_LABEL, RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......@@ -153,8 +153,7 @@ class User(Neo4jCsvSerializable):
return [result_node]
def create_relation(self):
# type: () -> List[Dict[str, Any]]
def create_relation(self) -> List[Dict[str, Any]]:
if self.manager_email:
# only create the relation if the manager exists
return [{
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from typing import Any, Dict, List, Union # noqa: F401
from typing import Any, Dict, List, Optional, Tuple
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
NODE_LABEL, RELATION_START_KEY, RELATION_START_LABEL, RELATION_END_KEY, \
......@@ -9,7 +9,6 @@ from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable, NODE_KEY, \
class Watermark(Neo4jCsvSerializable):
# type: (...) -> None
"""
Table watermark result model.
Each instance represents one row of table watermark result.
......@@ -21,20 +20,19 @@ class Watermark(Neo4jCsvSerializable):
TABLE_WATERMARK_RELATION_TYPE = 'WATERMARK'
def __init__(self,
create_time, # type: str
database, # type: str
schema, # type: str
table_name, # type: str
part_name, # type: str
part_type='high_watermark', # type: str
cluster='gold', # type: str
):
# type: (...) -> None
create_time: str,
database: str,
schema: str,
table_name: str,
part_name: str,
part_type: str = 'high_watermark',
cluster: str = 'gold',
) -> None:
self.create_time = create_time
self.database = database.lower()
self.schema = schema.lower()
self.table = table_name.lower()
self.parts = [] # type: list
self.parts: List[Tuple[str, str]] = []
if '=' not in part_name:
raise Exception('Only partition table has high watermark')
......@@ -48,38 +46,33 @@ class Watermark(Neo4jCsvSerializable):
self._node_iter = iter(self.create_nodes())
self._relation_iter = iter(self.create_relation())
def create_next_node(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_node(self) -> Optional[Dict[str, Any]]:
# return the string representation of the data
try:
return next(self._node_iter)
except StopIteration:
return None
def create_next_relation(self):
# type: (...) -> Union[Dict[str, Any], None]
def create_next_relation(self) -> Optional[Dict[str, Any]]:
try:
return next(self._relation_iter)
except StopIteration:
return None
def get_watermark_model_key(self):
# type: (...) -> str
def get_watermark_model_key(self) -> str:
return Watermark.KEY_FORMAT.format(database=self.database,
cluster=self.cluster,
schema=self.schema,
table=self.table,
part_type=self.part_type)
def get_metadata_model_key(self):
# type: (...) -> str
def get_metadata_model_key(self) -> str:
return '{database}://{cluster}.{schema}/{table}'.format(database=self.database,
cluster=self.cluster,
schema=self.schema,
table=self.table)
def create_nodes(self):
# type: () -> List[Dict[str, Any]]
def create_nodes(self) -> List[Dict[str, Any]]:
"""
Create a list of Neo4j node records
:return:
......@@ -95,8 +88,7 @@ class Watermark(Neo4jCsvSerializable):
})
return results
def create_relation(self):
# type: () -> List[Dict[str, Any]]
def create_relation(self) -> List[Dict[str, Any]]:
"""
Create a list of relation map between watermark record with original table
:return:
......
......@@ -3,12 +3,12 @@
import abc
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from typing import List
from databuilder import Scoped
from databuilder.callback import call_back
from databuilder.callback.call_back import Callback # noqa: F401
from databuilder.callback.call_back import Callback
class Publisher(Scoped):
......
......@@ -5,8 +5,8 @@ import json
import logging
from elasticsearch.exceptions import NotFoundError
from pyhocon import ConfigTree # noqa: F401
from typing import List # noqa: F401
from pyhocon import ConfigTree
from typing import List
from databuilder.publisher.base_publisher import Publisher
from databuilder.publisher.elasticsearch_constants import TABLE_ELASTICSEARCH_INDEX_MAPPING
......
......@@ -11,12 +11,12 @@ from os import listdir
from os.path import isfile, join
from string import Template
from neo4j import GraphDatabase, Transaction # noqa: F401
from neo4j import GraphDatabase, Transaction
import neo4j
from neo4j.exceptions import CypherError
from pyhocon import ConfigFactory # noqa: F401
from pyhocon import ConfigTree # noqa: F401
from typing import Set, List # noqa: F401
from pyhocon import ConfigFactory
from pyhocon import ConfigTree
from typing import Set, List
from databuilder.publisher.base_publisher import Publisher
from databuilder.publisher.neo4j_preprocessor import NoopRelationPreprocessor
......@@ -133,7 +133,7 @@ class Neo4jCsvPublisher(Publisher):
def init(self, conf: ConfigTree) -> None:
conf = conf.with_fallback(DEFAULT_CONFIG)
self._count = 0 # type: int
self._count: int = 0
self._progress_report_frequency = conf.get_int(NEO4J_PROGRESS_REPORT_FREQUENCY)
self._node_files = self._list_files(conf, NODE_FILES_DIR)
self._node_files_iter = iter(self._node_files)
......@@ -156,8 +156,8 @@ class Neo4jCsvPublisher(Publisher):
# config is list of node label.
# When set, this list specifies a list of nodes that shouldn't be updated, if exists
self.create_only_nodes = set(conf.get_list(NEO4J_CREATE_ONLY_NODES, default=[]))
self.labels = set() # type: Set[str]
self.publish_tag = conf.get_string(JOB_PUBLISH_TAG) # type: str
self.labels: Set[str] = set()
self.publish_tag: str = conf.get_string(JOB_PUBLISH_TAG)
if not self.publish_tag:
raise Exception('{} should not be empty'.format(JOB_PUBLISH_TAG))
......
......@@ -4,7 +4,7 @@
import abc
import logging
from typing import Iterable, Any, Dict, Iterator # noqa: F401
from typing import Iterable, Any, Dict, Iterator
LOGGER = logging.getLogger(__name__)
......
......@@ -3,9 +3,9 @@
import logging
import requests # noqa: F401
import requests
from jsonpath_rw import parse
from typing import Any, Dict # noqa: F401
from typing import Any, Dict
from databuilder.rest_api.rest_api_query import RestApiQuery
......
......@@ -7,7 +7,7 @@ import logging
import requests
from jsonpath_rw import parse
from retrying import retry
from typing import List, Dict, Any, Union, Iterator, Callable # noqa: F401
from typing import List, Dict, Any, Union, Iterator, Callable
from databuilder.rest_api.base_rest_api_query import BaseRestApiQuery
......@@ -147,10 +147,10 @@ class RestApiQuery(BaseRestApiQuery):
continue
raise e
response_json = response.json() # type: Union[List[Any], Dict[str, Any]]
response_json: Union[List[Any], Dict[str, Any]] = response.json()
# value extraction via JSON Path
result_list = [match.value for match in self._jsonpath_expr.find(response_json)] # type: List[Any]
result_list: List[Any] = [match.value for match in self._jsonpath_expr.find(response_json)]
if not result_list:
log_msg = 'No result from URL: {url} , JSONPATH: {json_path} , response payload: {response}' \
......@@ -188,10 +188,7 @@ class RestApiQuery(BaseRestApiQuery):
return self._url.format(**record)
@retry(stop_max_attempt_number=5, wait_exponential_multiplier=1000, wait_exponential_max=10000)
def _send_request(self,
url # type: str
):
# type: (...) -> requests.Response
def _send_request(self, url: str) -> requests.Response:
"""
Performs HTTP GET operation with retry on failure.
:param url:
......
......@@ -3,7 +3,7 @@
import abc
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder import Scoped
......
......@@ -5,14 +5,14 @@ import logging
import textwrap
import time
from neo4j import GraphDatabase # noqa: F401
from neo4j import GraphDatabase
import neo4j
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
from typing import Dict, Iterable, Any, List # noqa: F401
from pyhocon import ConfigFactory, ConfigTree
from typing import Any, Dict, Iterable
from databuilder import Scoped
from databuilder.publisher.neo4j_csv_publisher import JOB_PUBLISH_TAG
from databuilder.task.base_task import Task # noqa: F401
from databuilder.task.base_task import Task
# A end point for Neo4j e.g: bolt://localhost:9999
NEO4J_END_POINT_KEY = 'neo4j_endpoint'
......@@ -182,9 +182,11 @@ class Neo4jStalenessRemovalTask(Task):
break
LOGGER.info('Deleted {} stale data of {}'.format(total_count, t))
def _validate_staleness_pct(self, total_records, stale_records, types):
# type: (Iterable[Dict[str, Any]], Iterable[Dict[str, Any]], Iterable[str]) -> None
def _validate_staleness_pct(self,
total_records: Iterable[Dict[str, Any]],
stale_records: Iterable[Dict[str, Any]],
types: Iterable[str]
) -> None:
total_count_dict = {record['type']: int(record['count']) for record in total_records}
for record in stale_records:
......@@ -204,9 +206,7 @@ class Neo4jStalenessRemovalTask(Task):
raise Exception('Staleness percentage of {} is {} %. Stopping due to over threshold {} %'
.format(type_str, stale_pct, threshold))
def _validate_node_staleness_pct(self):
# type: () -> None
def _validate_node_staleness_pct(self) -> None:
total_nodes_statement = textwrap.dedent("""
MATCH (n)
WITH DISTINCT labels(n) as node, count(*) as count
......@@ -229,8 +229,7 @@ class Neo4jStalenessRemovalTask(Task):
stale_records=stale_records,
types=self.target_nodes)
def _validate_relation_staleness_pct(self):
# type: () -> None
def _validate_relation_staleness_pct(self) -> None:
total_relations_statement = textwrap.dedent("""
MATCH ()-[r]-()
RETURN type(r) as type, count(*) as count;
......
......@@ -3,15 +3,15 @@
import logging
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder import Scoped
from databuilder.extractor.base_extractor import Extractor # noqa: F401
from databuilder.loader.base_loader import Loader # noqa: F401
from databuilder.task.base_task import Task # noqa: F401
from databuilder.transformer.base_transformer import Transformer # noqa: F401
from databuilder.extractor.base_extractor import Extractor
from databuilder.loader.base_loader import Loader
from databuilder.task.base_task import Task
from databuilder.transformer.base_transformer import Transformer
from databuilder.transformer.base_transformer \
import NoopTransformer # noqa: F401
import NoopTransformer
from databuilder.utils.closer import Closer
......
......@@ -3,8 +3,8 @@
import abc
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Iterable, Optional # noqa: F401
from pyhocon import ConfigTree
from typing import Any, Iterable, Optional
from databuilder import Scoped
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from pyhocon import ConfigTree # noqa: F401
from typing import Dict, Optional, Tuple # noqa: F401
from pyhocon import ConfigTree
from typing import Optional, Tuple
from databuilder.transformer.base_transformer import Transformer
from databuilder.models.table_column_usage import ColumnReader, TableColumnUsage
......
......@@ -4,8 +4,8 @@
import importlib
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Dict # noqa: F401
from pyhocon import ConfigTree
from typing import Any, Dict
from databuilder.transformer.base_transformer import Transformer
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Dict # noqa: F401
from pyhocon import ConfigTree
from typing import Any, Dict
from databuilder.transformer.base_transformer import Transformer
......
......@@ -2,8 +2,8 @@
# SPDX-License-Identifier: Apache-2.0
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import Any # noqa: F401
from pyhocon import ConfigTree
from typing import Any
from databuilder.transformer.base_transformer import Transformer
......
......@@ -2,9 +2,9 @@
# SPDX-License-Identifier: Apache-2.0
import logging
from typing import Any, Dict # noqa: F401
from typing import Any, Dict
from pyhocon import ConfigTree # noqa: F401
from pyhocon import ConfigTree
from databuilder.transformer.base_transformer import Transformer
......
......@@ -3,8 +3,8 @@
import logging
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Dict # noqa: F401
from pyhocon import ConfigTree
from typing import Any, Dict
from databuilder.transformer.base_transformer import Transformer
......
......@@ -4,9 +4,9 @@
import logging
from datetime import datetime
from pyhocon import ConfigFactory # noqa: F401
from pyhocon import ConfigTree # noqa: F401
from typing import Any, Dict # noqa: F401
from pyhocon import ConfigFactory
from pyhocon import ConfigTree
from typing import Any, Dict
from databuilder.transformer.base_transformer import Transformer
......
......@@ -3,7 +3,7 @@
import atexit
from typing import Callable, List # noqa: F401
from typing import Callable, List
class Closer(object):
......
......@@ -5,7 +5,7 @@ import logging
import unittest
from mock import patch
from pyhocon import ConfigFactory # noqa: F401
from pyhocon import ConfigFactory
from typing import Any, Dict, List
from databuilder import Scoped
......
......@@ -3,7 +3,7 @@
import unittest
from pyhocon import ConfigFactory # noqa: F401
from pyhocon import ConfigFactory
from databuilder.extractor.restapi.rest_api_extractor import RestAPIExtractor, REST_API_QUERY, MODEL_CLASS, \
STATIC_RECORD_DICT
......
......@@ -6,7 +6,7 @@ import unittest
from mock import patch, MagicMock
from pyhocon import ConfigFactory
from typing import Any, Dict # noqa: F401
from typing import Any, Dict
from databuilder.extractor.athena_metadata_extractor import AthenaMetadataExtractor
from databuilder.extractor.sql_alchemy_extractor import SQLAlchemyExtractor
......
......@@ -7,7 +7,7 @@ from collections import OrderedDict
from mock import patch
from pyhocon import ConfigFactory
from typing import Any, Dict # noqa: F401
from typing import Any
from cassandra.metadata import ColumnMetadata as CassandraColumnMetadata
from databuilder.extractor.cassandra_extractor import CassandraExtractor
......
......@@ -3,7 +3,7 @@
import unittest
from pyhocon import ConfigFactory # noqa: F401
from pyhocon import ConfigFactory
from databuilder import Scoped
from databuilder.extractor.csv_extractor import CsvExtractor
......
......@@ -3,7 +3,7 @@
import unittest
from pyhocon import ConfigFactory # noqa: F401
from pyhocon import ConfigFactory
from databuilder import Scoped
from databuilder.extractor.generic_extractor import GenericExtractor
......
......@@ -6,7 +6,6 @@ import unittest
from mock import patch
from pyhocon import ConfigFactory
from typing import Any, Dict # noqa: F401
from databuilder.extractor.glue_extractor import GlueExtractor
from databuilder.models.table_metadata import TableMetadata, ColumnMetadata
......
......@@ -9,7 +9,7 @@ from pytz import UTC
from mock import patch, MagicMock
from pyhocon import ConfigFactory
from typing import Any, Iterable, Iterator, Dict, Optional, TypeVar # noqa: F401
from typing import Iterable, Iterator, Optional, TypeVar
from databuilder.extractor.hive_table_last_updated_extractor import HiveTableLastUpdatedExtractor
from databuilder.extractor.sql_alchemy_extractor import SQLAlchemyExtractor
......
......@@ -6,7 +6,7 @@ import unittest
from mock import patch, MagicMock
from pyhocon import ConfigFactory
from typing import Any, Dict # noqa: F401
from typing import Any, Dict
from databuilder.extractor.hive_table_metadata_extractor import HiveTableMetadataExtractor
from databuilder.extractor.sql_alchemy_extractor import SQLAlchemyExtractor
......@@ -225,8 +225,7 @@ class TestHiveTableMetadataExtractorWithWhereClause(unittest.TestCase):
extractor.init(self.conf)
self.assertTrue(self.where_clause_suffix in extractor.sql_stmt)
def test_hive_sql_statement_with_custom_sql(self):
# type: () -> None
def test_hive_sql_statement_with_custom_sql(self) -> None:
"""
Test Extraction by providing a custom sql
:return:
......
......@@ -6,7 +6,7 @@ import unittest
from mock import patch, MagicMock
from pyhocon import ConfigFactory
from typing import Any, Dict # noqa: F401
from typing import Any, Dict
from databuilder.extractor.mssql_metadata_extractor import MSSQLMetadataExtractor
from databuilder.extractor.sql_alchemy_extractor import SQLAlchemyExtractor
......