Unverified commit 381a4170, authored by Dorian Johnson, committed by GitHub

chore: remove python2 (#310)

* ci: remove py2 ci (#280)

(cherry picked from commit 2ac583cf)

* Remove all usages of six

* makefile: remove defunct python2 test command

* setup.py: bump version to 3.0.0

This change removes Python 2 support. Python 3.6 or above is now required.

There are no other breaking changes in this release.
Co-authored-by: Tao Feng <fengtao04@gmail.com>
parent d24cba9d
language: python
python:
- '2.7'
- '3.6'
install:
- pip install -r requirements.txt
......
......@@ -4,7 +4,8 @@ clean:
rm -rf dist/
.PHONY: test_unit
test_unit: test_unit2_or3_if_its_default
test_unit:
python3 -bb -m pytest tests
lint:
flake8 .
......@@ -12,9 +13,3 @@ lint:
.PHONY: test
test: test_unit lint
.PHONY: test_unit
test_unit2_or3_if_its_default:
python -bb -m pytest tests/unit
test_unit3:
python3 -bb -m pytest tests/unit
......@@ -13,7 +13,7 @@ Amundsen Databuilder is a data ingestion library, which is inspired by [Apache G
For information about Amundsen and our other services, visit the [main repository](https://github.com/lyft/amundsen#amundsen) `README.md` . Please also see our instructions for a [quick start](https://github.com/lyft/amundsen/blob/master/docs/installation.md#bootstrap-a-default-version-of-amundsen-using-docker) setup of Amundsen with dummy data, and an [overview of the architecture](https://github.com/lyft/amundsen/blob/master/docs/architecture.md#architecture).
## Requirements
- Python = 2.7.x or Python >= 3.6.x
- Python >= 3.6.x
## Doc
- https://lyft.github.io/amundsen/
......
......@@ -2,13 +2,11 @@
# SPDX-License-Identifier: Apache-2.0
import abc
import six
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
@six.add_metaclass(abc.ABCMeta)
class Scoped(object):
class Scoped(object, metaclass=abc.ABCMeta):
_EMPTY_CONFIG = ConfigFactory.from_dict({})
"""
An interface for class that works with scoped (nested) config.
......
......@@ -3,15 +3,13 @@
import abc
import logging
import six
from typing import List, Optional # noqa: F401
LOGGER = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class Callback(object):
class Callback(object, metaclass=abc.ABCMeta):
"""
A callback interface that expected to fire "on_success" if the operation is successful, else "on_failure" if
operation failed.
......
......@@ -2,7 +2,6 @@
# SPDX-License-Identifier: Apache-2.0
import logging
import six
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
......@@ -60,11 +59,7 @@ class Db2MetadataExtractor(Extractor):
cluster_source = "'{}'".format(self._cluster)
database = conf.get_string(Db2MetadataExtractor.DATABASE_KEY, default='db2')
if six.PY2 and isinstance(database, six.text_type):
database = database.encode('utf-8', 'ignore')
self._database = database
self._database = conf.get_string(Db2MetadataExtractor.DATABASE_KEY, default='db2')
self.sql_stmt = Db2MetadataExtractor.SQL_STATEMENT.format(
where_clause_suffix=conf.get_string(Db2MetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
......
......@@ -2,7 +2,6 @@
# SPDX-License-Identifier: Apache-2.0
import logging
import six
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
......@@ -86,13 +85,9 @@ class MSSQLMetadataExtractor(Extractor):
else:
cluster_source = "'{}'".format(self._cluster)
database = conf.get_string(
self._database = conf.get_string(
MSSQLMetadataExtractor.DATABASE_KEY,
default='mssql')
if six.PY2 and isinstance(database, six.text_type):
database = database.encode('utf-8', 'ignore')
self._database = database
config_where_clause = conf.get_string(
MSSQLMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY)
......
......@@ -2,7 +2,6 @@
# SPDX-License-Identifier: Apache-2.0
import logging
import six
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
......@@ -69,11 +68,7 @@ class MysqlMetadataExtractor(Extractor):
else:
cluster_source = "'{}'".format(self._cluster)
database = conf.get_string(MysqlMetadataExtractor.DATABASE_KEY, default='mysql')
if six.PY2 and isinstance(database, six.text_type):
database = database.encode('utf-8', 'ignore')
self._database = database
self._database = conf.get_string(MysqlMetadataExtractor.DATABASE_KEY, default='mysql')
self.sql_stmt = MysqlMetadataExtractor.SQL_STATEMENT.format(
where_clause_suffix=conf.get_string(MysqlMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
......
......@@ -2,7 +2,6 @@
# SPDX-License-Identifier: Apache-2.0
import logging
import six
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
......@@ -64,11 +63,7 @@ class PostgresMetadataExtractor(Extractor):
else:
cluster_source = "'{}'".format(self._cluster)
database = conf.get_string(PostgresMetadataExtractor.DATABASE_KEY, default='postgres')
if six.PY2 and isinstance(database, six.text_type):
database = database.encode('utf-8', 'ignore')
self._database = database
self._database = conf.get_string(PostgresMetadataExtractor.DATABASE_KEY, default='postgres')
self.sql_stmt = PostgresMetadataExtractor.SQL_STATEMENT.format(
where_clause_suffix=conf.get_string(PostgresMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
......
......@@ -3,7 +3,6 @@
import logging
import six
from collections import namedtuple
from pyhocon import ConfigFactory, ConfigTree # noqa: F401
......@@ -84,10 +83,6 @@ class SnowflakeMetadataExtractor(Extractor):
self._database = conf.get_string(SnowflakeMetadataExtractor.DATABASE_KEY)
self._snowflake_database = conf.get_string(SnowflakeMetadataExtractor.SNOWFLAKE_DATABASE_KEY)
if six.PY2:
self._database = self._database.encode('utf-8', 'ignore')
self._snowflake_database = self._snowflake_database.encode('utf-8', 'ignore')
self.sql_stmt = SnowflakeMetadataExtractor.SQL_STATEMENT.format(
where_clause_suffix=conf.get_string(SnowflakeMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
cluster_source=cluster_source,
......
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
import six
import csv
import logging
import os
import shutil
......@@ -17,11 +17,6 @@ from databuilder.models.neo4j_csv_serde import NODE_LABEL, \
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable # noqa: F401
from databuilder.utils.closer import Closer
if six.PY2:
import unicodecsv as csv
else:
import csv
LOGGER = logging.getLogger(__name__)
......@@ -170,15 +165,9 @@ class FsNeo4jCSVLoader(Loader):
LOGGER.info('Creating file for {}'.format(key))
if six.PY2:
file_out = open('{}/{}.csv'.format(dir_path, file_suffix), 'w')
writer = csv.DictWriter(file_out, fieldnames=csv_record_dict.keys(),
quoting=csv.QUOTE_NONNUMERIC, encoding='utf-8')
else:
file_out = open('{}/{}.csv'.format(dir_path, file_suffix), 'w', encoding='utf8')
writer = csv.DictWriter(file_out, fieldnames=csv_record_dict.keys(),
quoting=csv.QUOTE_NONNUMERIC)
file_out = open('{}/{}.csv'.format(dir_path, file_suffix), 'w', encoding='utf8')
writer = csv.DictWriter(file_out, fieldnames=csv_record_dict.keys(),
quoting=csv.QUOTE_NONNUMERIC)
def file_out_close():
# type: () -> None
......
......@@ -3,7 +3,6 @@
import abc
import six
from typing import Dict, Set, Any, Union # noqa: F401
NODE_KEY = 'KEY'
......@@ -24,8 +23,7 @@ LABELS = {NODE_LABEL, RELATION_START_LABEL, RELATION_END_LABEL}
TYPES = {RELATION_TYPE, RELATION_REVERSE_TYPE}
@six.add_metaclass(abc.ABCMeta)
class Neo4jCsvSerializable(object):
class Neo4jCsvSerializable(object, metaclass=abc.ABCMeta):
"""
A Serializable abstract class asks subclass to implement next node or
next relation in dict form so that it can be serialized to CSV file.
......@@ -120,7 +118,7 @@ class Neo4jCsvSerializable(object):
required_count = 0
for header_col, val_col in \
((header_col, val_col) for header_col, val_col
in six.iteritems(val_dict) if header_col in required_set):
in val_dict.items() if header_col in required_set):
required_count += 1
if header_col in LABELS:
......
......@@ -3,7 +3,6 @@
import copy
from collections import namedtuple
from six import string_types
from typing import Iterable, Any, Union, Iterator, Dict, Set # noqa: F401
......@@ -334,7 +333,7 @@ class TableMetadata(Neo4jCsvSerializable):
@staticmethod
def format_tags(tags):
if isinstance(tags, string_types):
if isinstance(tags, str):
tags = list(filter(None, tags.split(',')))
if isinstance(tags, list):
tags = [tag.lower().strip() for tag in tags]
......
......@@ -11,7 +11,6 @@ from os import listdir
from os.path import isfile, join
from string import Template
import six
from neo4j import GraphDatabase, Transaction # noqa: F401
import neo4j
from neo4j.exceptions import CypherError
......@@ -389,7 +388,7 @@ ON MATCH SET {update_prop_body}""".format(create_prop_body=create_prop_body,
"""
template_params = {}
props = []
for k, v in six.iteritems(record_dict):
for k, v in record_dict.items():
if k in excludes:
template_params[k] = v
continue
......@@ -437,10 +436,7 @@ ON MATCH SET {update_prop_body}""".format(create_prop_body=create_prop_body,
if LOGGER.isEnabledFor(logging.DEBUG):
LOGGER.debug('Executing statement: {} with params {}'.format(stmt, params))
if six.PY2:
result = tx.run(unicode(stmt, errors='ignore'), parameters=params) # noqa
else:
result = tx.run(str(stmt).encode('utf-8', 'ignore'), parameters=params)
result = tx.run(str(stmt).encode('utf-8', 'ignore'), parameters=params)
if expect_result and not result.single():
raise RuntimeError('Failed to executed statement: {}'.format(stmt))
......
......@@ -4,14 +4,12 @@
import abc
import logging
import six
import textwrap
LOGGER = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class RelationPreprocessor(object):
class RelationPreprocessor(object, metaclass=abc.ABCMeta):
"""
A Preprocessor for relations. Prior to publish Neo4j relations, RelationPreprocessor will be used for
pre-processing.
......
......@@ -4,14 +4,12 @@
import abc
import logging
import six
from typing import Iterable, Any, Dict, Iterator # noqa: F401
LOGGER = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class BaseRestApiQuery(object):
class BaseRestApiQuery(object, metaclass=abc.ABCMeta):
@abc.abstractmethod
def execute(self):
......
......@@ -3,13 +3,11 @@
import abc
import six
from requests.exceptions import HTTPError
from typing import Iterable, Union, List, Dict, Any, Optional # noqa: F401
@six.add_metaclass(abc.ABCMeta)
class BaseFailureHandler(object):
class BaseFailureHandler(object, metaclass=abc.ABCMeta):
@abc.abstractmethod
def can_skip_failure(self,
......
......@@ -2,7 +2,6 @@
# SPDX-License-Identifier: Apache-2.0
import logging
import six
from pyhocon import ConfigTree # noqa: F401
from typing import Any # noqa: F401
......@@ -38,13 +37,9 @@ class RegexStrReplaceTransformer(Transformer):
else:
val = getattr(record, self._attribute_name)
if val is None or not isinstance(val, six.string_types):
if val is None or not isinstance(val, str):
return record
# Encode unicode string
if six.PY2:
val = val.encode('utf-8', 'ignore')
for regex_replace_tuple in self._regex_replace_tuples:
val = val.replace(regex_replace_tuple[0], regex_replace_tuple[1])
......
......@@ -5,7 +5,7 @@ import os
from setuptools import setup, find_packages
__version__ = '2.6.5'
__version__ = '3.0.0'
requirements_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'requirements.txt')
with open(requirements_path) as requirements_file:
......@@ -56,7 +56,7 @@ setup(
packages=find_packages(exclude=['tests*']),
dependency_links=[],
install_requires=requirements,
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*',
python_requires='>=3.6,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*',
extras_require={
':python_version=="2.7"': ['typing>=3.6'], # allow typehinting PY2
'all': all_deps,
......@@ -71,7 +71,6 @@ setup(
'druid': druid,
},
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
......
......@@ -3,7 +3,6 @@
import json
from mock import MagicMock, mock_open, patch
import six
import unittest
from pyhocon import ConfigFactory
......@@ -38,10 +37,7 @@ class TestElasticsearchPublisher(unittest.TestCase):
"""
Test Publish functionality with no data
"""
target = 'builtins.open'
if six.PY2:
target = '__builtin__.open'
with patch(target, mock_open(read_data='')) as mock_file:
with patch('builtins.open', mock_open(read_data='')) as mock_file:
publisher = ElasticsearchPublisher()
publisher.init(conf=Scoped.get_scoped_conf(conf=self.conf,
scope=publisher.get_scope()))
......@@ -62,10 +58,7 @@ class TestElasticsearchPublisher(unittest.TestCase):
'KEY_DOESNOT_MATTER2': 'NO_VALUE2'})
self.mock_es_client.indices.get_alias.return_value = {}
target = 'builtins.open'
if six.PY2:
target = '__builtin__.open'
with patch(target, mock_open(read_data=mock_data)) as mock_file:
with patch('builtins.open', mock_open(read_data=mock_data)) as mock_file:
publisher = ElasticsearchPublisher()
publisher.init(conf=Scoped.get_scoped_conf(conf=self.conf,
scope=publisher.get_scope()))
......@@ -99,10 +92,7 @@ class TestElasticsearchPublisher(unittest.TestCase):
'KEY_DOESNOT_MATTER2': 'NO_VALUE2'})
self.mock_es_client.indices.get_alias.return_value = {'test_old_index': 'DOES_NOT_MATTER'}
target = 'builtins.open'
if six.PY2:
target = '__builtin__.open'
with patch(target, mock_open(read_data=mock_data)) as mock_file:
with patch('builtins.open', mock_open(read_data=mock_data)) as mock_file:
publisher = ElasticsearchPublisher()
publisher.init(conf=Scoped.get_scoped_conf(conf=self.conf,
scope=publisher.get_scope()))
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment