Unverified Commit 381a4170 authored by Dorian Johnson, committed by GitHub

chore: remove python2 (#310)

* ci: remove py2 ci (#280)

(cherry picked from commit 2ac583cf)

* Remove all usages of six

* makefile: remove defunct python2 test command

* setup.py: bump version to 3.0.0

This change removes Python 2 support. Python 3.6 or later is now required.

There are no other breaking changes in this release.
Co-authored-by: Tao Feng <fengtao04@gmail.com>
parent d24cba9d
.travis.yml:

```diff
 language: python
 python:
-  - '2.7'
   - '3.6'
 install:
   - pip install -r requirements.txt
```
Makefile:

```diff
@@ -4,7 +4,8 @@ clean:
 	rm -rf dist/

 .PHONY: test_unit
-test_unit: test_unit2_or3_if_its_default
+test_unit:
+	python3 -bb -m pytest tests

 lint:
 	flake8 .
@@ -12,9 +13,3 @@ lint:
 .PHONY: test
 test: test_unit lint
-
-.PHONY: test_unit
-test_unit2_or3_if_its_default:
-	python -bb -m pytest tests/unit
-
-test_unit3:
-	python3 -bb -m pytest tests/unit
```
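The `-bb` flag that both the old and new test commands pass is a porting aid: it promotes `BytesWarning` to an error whenever `bytes` and `str` are compared, which catches lingering Python 2 idioms at test time. A small standalone illustration (the file name is hypothetical):

```python
# Save as bytes_check.py and run:  python3 -bb bytes_check.py
# Without -bb the comparison silently evaluates to False; with -bb
# it raises BytesWarning, flagging the str/bytes mix-up.
name = 'users'
raw = b'users'
print(name == raw)
```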
README.md:

```diff
@@ -13,7 +13,7 @@ Amundsen Databuilder is a data ingestion library, which is inspired by [Apache G
 For information about Amundsen and our other services, visit the [main repository](https://github.com/lyft/amundsen#amundsen) `README.md` . Please also see our instructions for a [quick start](https://github.com/lyft/amundsen/blob/master/docs/installation.md#bootstrap-a-default-version-of-amundsen-using-docker) setup of Amundsen with dummy data, and an [overview of the architecture](https://github.com/lyft/amundsen/blob/master/docs/architecture.md#architecture).

 ## Requirements
-- Python = 2.7.x or Python >= 3.6.x
+- Python >= 3.6.x

 ## Doc
 - https://lyft.github.io/amundsen/
```
Scoped:

```diff
@@ -2,13 +2,11 @@
 # SPDX-License-Identifier: Apache-2.0

 import abc

-import six
 from pyhocon import ConfigTree, ConfigFactory  # noqa: F401


-@six.add_metaclass(abc.ABCMeta)
-class Scoped(object):
+class Scoped(object, metaclass=abc.ABCMeta):
     _EMPTY_CONFIG = ConfigFactory.from_dict({})
     """
     An interface for class that works with scoped (nested) config.
```
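The `metaclass=` keyword shown above is the Python 3 replacement for `six.add_metaclass`; both attach `abc.ABCMeta` so abstract methods are enforced at instantiation time. A minimal sketch of the idiom, using an illustrative class rather than one from the codebase:

```python
import abc

class Base(object, metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def get_scope(self):
        ...

class Concrete(Base):
    def get_scope(self):
        return 'example'

print(Concrete().get_scope())   # -> example
# Base() would raise TypeError, because get_scope is still abstract.
```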
Callback:

```diff
@@ -3,15 +3,13 @@
 import abc
 import logging

-import six
 from typing import List, Optional  # noqa: F401

 LOGGER = logging.getLogger(__name__)


-@six.add_metaclass(abc.ABCMeta)
-class Callback(object):
+class Callback(object, metaclass=abc.ABCMeta):
     """
     A callback interface that expected to fire "on_success" if the operation is successful, else "on_failure" if
     operation failed.
```
Db2MetadataExtractor:

```diff
@@ -2,7 +2,6 @@
 # SPDX-License-Identifier: Apache-2.0

 import logging
-import six

 from collections import namedtuple
 from pyhocon import ConfigFactory, ConfigTree  # noqa: F401
@@ -60,11 +59,7 @@ class Db2MetadataExtractor(Extractor):
             cluster_source = "'{}'".format(self._cluster)

-        database = conf.get_string(Db2MetadataExtractor.DATABASE_KEY, default='db2')
-        if six.PY2 and isinstance(database, six.text_type):
-            database = database.encode('utf-8', 'ignore')
-        self._database = database
+        self._database = conf.get_string(Db2MetadataExtractor.DATABASE_KEY, default='db2')

         self.sql_stmt = Db2MetadataExtractor.SQL_STATEMENT.format(
             where_clause_suffix=conf.get_string(Db2MetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
```
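The deleted branch existed because Python 2 had two string types, and `conf.get_string` could hand back `unicode` where downstream code wanted byte strings. Python 3 has a single text type, so the guard collapses to one assignment; the same simplification repeats in the MSSQL, MySQL, Postgres, and Snowflake extractors below. A standalone sketch of the type behaviour the old code was defending against:

```python
text = 'db2'
assert isinstance(text, str)           # in Python 3 all literals are unicode text

raw = text.encode('utf-8', 'ignore')   # explicit conversion yields a separate bytes object
assert isinstance(raw, bytes)
assert raw.decode('utf-8') == text     # and round-trips back to str
```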
MSSQLMetadataExtractor (same pattern):

```diff
@@ -2,7 +2,6 @@
 # SPDX-License-Identifier: Apache-2.0

 import logging
-import six

 from collections import namedtuple
 from pyhocon import ConfigFactory, ConfigTree  # noqa: F401
@@ -86,13 +85,9 @@ class MSSQLMetadataExtractor(Extractor):
         else:
             cluster_source = "'{}'".format(self._cluster)

-        database = conf.get_string(
-            MSSQLMetadataExtractor.DATABASE_KEY,
-            default='mssql')
-        if six.PY2 and isinstance(database, six.text_type):
-            database = database.encode('utf-8', 'ignore')
-        self._database = database
+        self._database = conf.get_string(
+            MSSQLMetadataExtractor.DATABASE_KEY,
+            default='mssql')

         config_where_clause = conf.get_string(
             MSSQLMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY)
```
MysqlMetadataExtractor (same pattern):

```diff
@@ -2,7 +2,6 @@
 # SPDX-License-Identifier: Apache-2.0

 import logging
-import six

 from collections import namedtuple
 from pyhocon import ConfigFactory, ConfigTree  # noqa: F401
@@ -69,11 +68,7 @@ class MysqlMetadataExtractor(Extractor):
         else:
             cluster_source = "'{}'".format(self._cluster)

-        database = conf.get_string(MysqlMetadataExtractor.DATABASE_KEY, default='mysql')
-        if six.PY2 and isinstance(database, six.text_type):
-            database = database.encode('utf-8', 'ignore')
-        self._database = database
+        self._database = conf.get_string(MysqlMetadataExtractor.DATABASE_KEY, default='mysql')

         self.sql_stmt = MysqlMetadataExtractor.SQL_STATEMENT.format(
             where_clause_suffix=conf.get_string(MysqlMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
```
PostgresMetadataExtractor (same pattern):

```diff
@@ -2,7 +2,6 @@
 # SPDX-License-Identifier: Apache-2.0

 import logging
-import six

 from collections import namedtuple
 from pyhocon import ConfigFactory, ConfigTree  # noqa: F401
@@ -64,11 +63,7 @@ class PostgresMetadataExtractor(Extractor):
         else:
             cluster_source = "'{}'".format(self._cluster)

-        database = conf.get_string(PostgresMetadataExtractor.DATABASE_KEY, default='postgres')
-        if six.PY2 and isinstance(database, six.text_type):
-            database = database.encode('utf-8', 'ignore')
-        self._database = database
+        self._database = conf.get_string(PostgresMetadataExtractor.DATABASE_KEY, default='postgres')

         self.sql_stmt = PostgresMetadataExtractor.SQL_STATEMENT.format(
             where_clause_suffix=conf.get_string(PostgresMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
```
SnowflakeMetadataExtractor (same pattern):

```diff
@@ -3,7 +3,6 @@
 import logging
-import six

 from collections import namedtuple
 from pyhocon import ConfigFactory, ConfigTree  # noqa: F401
@@ -84,10 +83,6 @@ class SnowflakeMetadataExtractor(Extractor):
         self._database = conf.get_string(SnowflakeMetadataExtractor.DATABASE_KEY)
         self._snowflake_database = conf.get_string(SnowflakeMetadataExtractor.SNOWFLAKE_DATABASE_KEY)
-        if six.PY2:
-            self._database = self._database.encode('utf-8', 'ignore')
-            self._snowflake_database = self._snowflake_database.encode('utf-8', 'ignore')

         self.sql_stmt = SnowflakeMetadataExtractor.SQL_STATEMENT.format(
             where_clause_suffix=conf.get_string(SnowflakeMetadataExtractor.WHERE_CLAUSE_SUFFIX_KEY),
             cluster_source=cluster_source,
```
FsNeo4jCSVLoader:

```diff
 # Copyright Contributors to the Amundsen project.
 # SPDX-License-Identifier: Apache-2.0

-import six
+import csv
 import logging
 import os
 import shutil
@@ -17,11 +17,6 @@ from databuilder.models.neo4j_csv_serde import NODE_LABEL, \
 from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable  # noqa: F401
 from databuilder.utils.closer import Closer

-if six.PY2:
-    import unicodecsv as csv
-else:
-    import csv

 LOGGER = logging.getLogger(__name__)
@@ -170,12 +165,6 @@ class FsNeo4jCSVLoader(Loader):
         LOGGER.info('Creating file for {}'.format(key))

-        if six.PY2:
-            file_out = open('{}/{}.csv'.format(dir_path, file_suffix), 'w')
-            writer = csv.DictWriter(file_out, fieldnames=csv_record_dict.keys(),
-                                    quoting=csv.QUOTE_NONNUMERIC, encoding='utf-8')
-        else:
-            file_out = open('{}/{}.csv'.format(dir_path, file_suffix), 'w', encoding='utf8')
-            writer = csv.DictWriter(file_out, fieldnames=csv_record_dict.keys(),
-                                    quoting=csv.QUOTE_NONNUMERIC)
+        file_out = open('{}/{}.csv'.format(dir_path, file_suffix), 'w', encoding='utf8')
+        writer = csv.DictWriter(file_out, fieldnames=csv_record_dict.keys(),
+                                quoting=csv.QUOTE_NONNUMERIC)
```
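Python 3's standard `csv` module operates on text streams, so the `unicodecsv` backport and the per-branch `encoding` argument to `DictWriter` are both unnecessary: the encoding is declared once, on `open()`. A self-contained sketch of the pattern (path and rows are illustrative):

```python
import csv

rows = [{'name': 'users', 'description': 'stores café visitors'}]

# The text stream owns the encoding; csv only ever sees str objects.
# newline='' is the documented setting that lets csv manage line endings.
with open('/tmp/example.csv', 'w', encoding='utf8', newline='') as file_out:
    writer = csv.DictWriter(file_out, fieldnames=rows[0].keys(),
                            quoting=csv.QUOTE_NONNUMERIC)
    writer.writeheader()
    writer.writerows(rows)
```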
Neo4jCsvSerializable:

```diff
@@ -3,7 +3,6 @@
 import abc

-import six
 from typing import Dict, Set, Any, Union  # noqa: F401

 NODE_KEY = 'KEY'
@@ -24,8 +23,7 @@ LABELS = {NODE_LABEL, RELATION_START_LABEL, RELATION_END_LABEL}
 TYPES = {RELATION_TYPE, RELATION_REVERSE_TYPE}

-@six.add_metaclass(abc.ABCMeta)
-class Neo4jCsvSerializable(object):
+class Neo4jCsvSerializable(object, metaclass=abc.ABCMeta):
     """
     A Serializable abstract class asks subclass to implement next node or
     next relation in dict form so that it can be serialized to CSV file.
@@ -120,7 +118,7 @@ class Neo4jCsvSerializable(object):
         required_count = 0
         for header_col, val_col in \
                 ((header_col, val_col) for header_col, val_col
-                 in six.iteritems(val_dict) if header_col in required_set):
+                 in val_dict.items() if header_col in required_set):
             required_count += 1
             if header_col in LABELS:
```
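`six.iteritems` existed because Python 2's `dict.items()` materialized a list; in Python 3, `items()` returns a constant-size view, so the helper is dropped in favour of the plain method. A quick sketch (dict contents are illustrative):

```python
val_dict = {'KEY': 'db://gold.schema/table', 'LABEL': 'Table'}
required_set = {'KEY', 'LABEL'}

# items() is a live view, not a copied list, which makes it the
# direct Python 3 replacement for six.iteritems(val_dict).
for header_col, val_col in val_dict.items():
    if header_col in required_set:
        print(header_col, '->', val_col)
```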
TableMetadata:

```diff
@@ -3,7 +3,6 @@
 import copy
 from collections import namedtuple

-from six import string_types
 from typing import Iterable, Any, Union, Iterator, Dict, Set  # noqa: F401
@@ -334,7 +333,7 @@ class TableMetadata(Neo4jCsvSerializable):
     @staticmethod
     def format_tags(tags):
-        if isinstance(tags, string_types):
+        if isinstance(tags, str):
            tags = list(filter(None, tags.split(',')))
         if isinstance(tags, list):
             tags = [tag.lower().strip() for tag in tags]
```
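`six.string_types` was `(str, unicode)` on Python 2 and just `(str,)` on Python 3, so the check reduces to plain `str`. To make the behaviour of `format_tags` concrete, here is the same logic as a standalone function with illustrative inputs:

```python
def format_tags(tags):
    # Accept either a comma-separated string or a list of tags.
    if isinstance(tags, str):
        tags = list(filter(None, tags.split(',')))
    if isinstance(tags, list):
        tags = [tag.lower().strip() for tag in tags]
    return tags

print(format_tags('PII, Finance,'))      # -> ['pii', 'finance']
print(format_tags(['Core', ' Daily ']))  # -> ['core', 'daily']
```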
Neo4j CSV publisher:

```diff
@@ -11,7 +11,6 @@ from os import listdir
 from os.path import isfile, join
 from string import Template

-import six
 from neo4j import GraphDatabase, Transaction  # noqa: F401
 import neo4j
 from neo4j.exceptions import CypherError
@@ -389,7 +388,7 @@ ON MATCH SET {update_prop_body}""".format(create_prop_body=create_prop_body,
         """
         template_params = {}
         props = []
-        for k, v in six.iteritems(record_dict):
+        for k, v in record_dict.items():
             if k in excludes:
                 template_params[k] = v
                 continue
@@ -437,9 +436,6 @@ ON MATCH SET {update_prop_body}""".format(create_prop_body=create_prop_body,
         if LOGGER.isEnabledFor(logging.DEBUG):
             LOGGER.debug('Executing statement: {} with params {}'.format(stmt, params))

-        if six.PY2:
-            result = tx.run(unicode(stmt, errors='ignore'), parameters=params)  # noqa
-        else:
-            result = tx.run(str(stmt).encode('utf-8', 'ignore'), parameters=params)
+        result = tx.run(str(stmt).encode('utf-8', 'ignore'), parameters=params)

         if expect_result and not result.single():
             raise RuntimeError('Failed to executed statement: {}'.format(stmt))
```
RelationPreprocessor:

```diff
@@ -4,14 +4,12 @@
 import abc
 import logging

-import six
 import textwrap

 LOGGER = logging.getLogger(__name__)


-@six.add_metaclass(abc.ABCMeta)
-class RelationPreprocessor(object):
+class RelationPreprocessor(object, metaclass=abc.ABCMeta):
     """
     A Preprocessor for relations. Prior to publish Neo4j relations, RelationPreprocessor will be used for
     pre-processing.
```
BaseRestApiQuery:

```diff
@@ -4,14 +4,12 @@
 import abc
 import logging

-import six
 from typing import Iterable, Any, Dict, Iterator  # noqa: F401

 LOGGER = logging.getLogger(__name__)


-@six.add_metaclass(abc.ABCMeta)
-class BaseRestApiQuery(object):
+class BaseRestApiQuery(object, metaclass=abc.ABCMeta):
     @abc.abstractmethod
     def execute(self):
```
BaseFailureHandler:

```diff
@@ -3,13 +3,11 @@
 import abc

-import six
 from requests.exceptions import HTTPError
 from typing import Iterable, Union, List, Dict, Any, Optional  # noqa: F401


-@six.add_metaclass(abc.ABCMeta)
-class BaseFailureHandler(object):
+class BaseFailureHandler(object, metaclass=abc.ABCMeta):
     @abc.abstractmethod
     def can_skip_failure(self,
```
RegexStrReplaceTransformer:

```diff
@@ -2,7 +2,6 @@
 # SPDX-License-Identifier: Apache-2.0

 import logging
-import six

 from pyhocon import ConfigTree  # noqa: F401
 from typing import Any  # noqa: F401
@@ -38,13 +37,9 @@ class RegexStrReplaceTransformer(Transformer):
         else:
             val = getattr(record, self._attribute_name)

-        if val is None or not isinstance(val, six.string_types):
+        if val is None or not isinstance(val, str):
             return record

-        # Encode unicode string
-        if six.PY2:
-            val = val.encode('utf-8', 'ignore')
-
         for regex_replace_tuple in self._regex_replace_tuples:
             val = val.replace(regex_replace_tuple[0], regex_replace_tuple[1])
```
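With one string type, the transformer's guard becomes a plain `isinstance(val, str)` and the pre-encoding step disappears; the replacement loop itself is ordinary `str.replace` over (old, new) pairs. A standalone sketch of that core loop (the tuples are illustrative):

```python
regex_replace_tuples = [('\n', ' '), ('\\', '')]

def scrub(val):
    # Mirrors the transformer's guard and loop: skip non-strings,
    # then apply literal substring replacement pair by pair.
    if val is None or not isinstance(val, str):
        return val
    for old, new in regex_replace_tuples:
        val = val.replace(old, new)
    return val

print(scrub('line one\nline two\\'))  # -> line one line two
```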
setup.py:

```diff
@@ -5,7 +5,7 @@ import os
 from setuptools import setup, find_packages

-__version__ = '2.6.5'
+__version__ = '3.0.0'

 requirements_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'requirements.txt')
 with open(requirements_path) as requirements_file:
@@ -56,7 +56,7 @@ setup(
     packages=find_packages(exclude=['tests*']),
     dependency_links=[],
     install_requires=requirements,
-    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*',
+    python_requires='>=3.6,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*',
     extras_require={
         ':python_version=="2.7"': ['typing>=3.6'],  # allow typehinting PY2
         'all': all_deps,
@@ -71,7 +71,6 @@ setup(
         'druid': druid,
     },
     classifiers=[
-        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
     ],
```
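With the floor raised to 3.6, the `!=3.0.*` through `!=3.5.*` exclusions and the `python_version=="2.7"` typing extra can never match, so they are dead weight that a follow-up could drop. A hedged sketch of the equivalent minimal declaration (fields abbreviated; not the project's full `setup.py`):

```python
from setuptools import setup, find_packages

setup(
    name='amundsen-databuilder',   # assumption: other metadata omitted here
    version='3.0.0',
    packages=find_packages(exclude=['tests*']),
    # A bare floor states the same constraint once the 2.x range is gone.
    python_requires='>=3.6',
    classifiers=[
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
)
```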
TestElasticsearchPublisher:

```diff
@@ -3,7 +3,6 @@
 import json
 from mock import MagicMock, mock_open, patch
-import six
 import unittest

 from pyhocon import ConfigFactory
@@ -38,10 +37,7 @@ class TestElasticsearchPublisher(unittest.TestCase):
         """
         Test Publish functionality with no data
         """
-        target = 'builtins.open'
-        if six.PY2:
-            target = '__builtin__.open'
-        with patch(target, mock_open(read_data='')) as mock_file:
+        with patch('builtins.open', mock_open(read_data='')) as mock_file:
             publisher = ElasticsearchPublisher()
             publisher.init(conf=Scoped.get_scoped_conf(conf=self.conf,
                                                        scope=publisher.get_scope()))
@@ -62,10 +58,7 @@ class TestElasticsearchPublisher(unittest.TestCase):
                                  'KEY_DOESNOT_MATTER2': 'NO_VALUE2'})
         self.mock_es_client.indices.get_alias.return_value = {}

-        target = 'builtins.open'
-        if six.PY2:
-            target = '__builtin__.open'
-        with patch(target, mock_open(read_data=mock_data)) as mock_file:
+        with patch('builtins.open', mock_open(read_data=mock_data)) as mock_file:
             publisher = ElasticsearchPublisher()
             publisher.init(conf=Scoped.get_scoped_conf(conf=self.conf,
                                                        scope=publisher.get_scope()))
@@ -99,10 +92,7 @@ class TestElasticsearchPublisher(unittest.TestCase):
                                  'KEY_DOESNOT_MATTER2': 'NO_VALUE2'})
         self.mock_es_client.indices.get_alias.return_value = {'test_old_index': 'DOES_NOT_MATTER'}

-        target = 'builtins.open'
-        if six.PY2:
-            target = '__builtin__.open'
-        with patch(target, mock_open(read_data=mock_data)) as mock_file:
+        with patch('builtins.open', mock_open(read_data=mock_data)) as mock_file:
             publisher = ElasticsearchPublisher()
             publisher.init(conf=Scoped.get_scoped_conf(conf=self.conf,
                                                        scope=publisher.get_scope()))
```
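On Python 2 the `open` builtin lived in the `__builtin__` module, so the tests had to compute the patch target at runtime; with a single supported runtime the target is always `'builtins.open'`. A self-contained sketch of the pattern using the stdlib mock (the function under test is illustrative):

```python
from unittest.mock import mock_open, patch

def read_config(path):
    # Stand-in for code that opens a file at runtime.
    with open(path) as f:
        return f.read()

# Patching builtins.open intercepts the call without touching disk.
with patch('builtins.open', mock_open(read_data='hello')) as mock_file:
    assert read_config('ignored.cfg') == 'hello'
    mock_file.assert_called_once_with('ignored.cfg')
```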