Update hacking for Python3
The repo is Python 3 only now, so update hacking to version 3.0, which supports Python 3, and fix the problems it found. Also update the local hacking checks for the new flake8 plugin interface. # to unbreak gate: Depends-on: https://review.opendev.org/715835 Change-Id: Icc2f4368cc90689d74510ce36fe77d2346aec625
This commit is contained in:
parent
d96ed3fa1a
commit
2e3d8ab80d
@ -39,7 +39,7 @@ class Zone:
|
||||
self.to_file(sys.stdout)
|
||||
|
||||
def to_file(self, f):
|
||||
if type(f) is 'file':
|
||||
if type(f) == 'file':
|
||||
fd = f
|
||||
elif type(f) is str:
|
||||
if os.path.isdir(f):
|
||||
@ -203,5 +203,6 @@ def main():
|
||||
except IOError as e:
|
||||
LOG.error(e)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
@ -104,7 +104,7 @@ class BlacklistsController(rest.RestController):
|
||||
response = pecan.response
|
||||
|
||||
if request.content_type == 'application/json-patch+json':
|
||||
raise NotImplemented('json-patch not implemented')
|
||||
raise NotImplementedError('json-patch not implemented')
|
||||
|
||||
# Fetch the existing blacklist entry
|
||||
blacklist = self.central_api.get_blacklist(context, blacklist_id)
|
||||
|
@ -108,7 +108,7 @@ class PoolsController(rest.RestController):
|
||||
response = pecan.response
|
||||
|
||||
if request.content_type == 'application/json-patch+json':
|
||||
raise NotImplemented('json-patch not implemented')
|
||||
raise NotImplementedError('json-patch not implemented')
|
||||
|
||||
# Fetch the existing pool
|
||||
pool = self.central_api.get_pool(context, pool_id)
|
||||
|
@ -98,7 +98,7 @@ class TldsController(rest.RestController):
|
||||
body = request.body_dict
|
||||
response = pecan.response
|
||||
if request.content_type == 'application/json-patch+json':
|
||||
raise NotImplemented('json-patch not implemented')
|
||||
raise NotImplementedError('json-patch not implemented')
|
||||
|
||||
# Fetch the existing tld
|
||||
tld = self.central_api.get_tld(context, tld_id)
|
||||
|
@ -100,7 +100,7 @@ class TsigKeysController(rest.RestController):
|
||||
response = pecan.response
|
||||
|
||||
if request.content_type == 'application/json-patch+json':
|
||||
raise NotImplemented('json-patch not implemented')
|
||||
raise NotImplementedError('json-patch not implemented')
|
||||
|
||||
# Fetch the existing tsigkey entry
|
||||
tsigkey = self.central_api.get_tsigkey(context, tsigkey_id)
|
||||
|
@ -155,7 +155,7 @@ class ZonesController(rest.RestController):
|
||||
# 1) "Nested" resources? records inside a recordset.
|
||||
# 2) What to do when a zone doesn't exist in the first place?
|
||||
# 3) ...?
|
||||
raise NotImplemented('json-patch not implemented')
|
||||
raise NotImplementedError('json-patch not implemented')
|
||||
else:
|
||||
# Update the zone object with the new values
|
||||
zone = DesignateAdapter.parse('API_v2', body, zone)
|
||||
|
@ -121,7 +121,7 @@ class TransferRequestsController(rest.RestController):
|
||||
response = pecan.response
|
||||
|
||||
if request.content_type == 'application/json-patch+json':
|
||||
raise NotImplemented('json-patch not implemented')
|
||||
raise NotImplementedError('json-patch not implemented')
|
||||
|
||||
# Fetch the existing zone_transfer_request
|
||||
zone_transfer_request = self.central_api.get_zone_transfer_request(
|
||||
|
@ -149,7 +149,7 @@ class GdnsdBackend(base.AgentBackend):
|
||||
def _generate_zone_filename(self, zone_name):
|
||||
"""Generate a filename for a zone file
|
||||
"/" is traslated into "@"
|
||||
Non-valid characters are translated into \ NNN
|
||||
Non-valid characters are translated into \\ NNN
|
||||
where NNN is a decimal integer in the range 0 - 255
|
||||
The filename is lowercase
|
||||
|
||||
|
@ -160,7 +160,7 @@ class Knot2Backend(base.AgentBackend):
|
||||
try:
|
||||
serial = out.split('|')[1].split()[1]
|
||||
return int(serial)
|
||||
except Exception as e:
|
||||
except Exception:
|
||||
LOG.error("Unable to parse knotc output: %r", out)
|
||||
raise exceptions.Backend("Unexpected knotc zone-status output")
|
||||
|
||||
|
@ -52,17 +52,17 @@ class NSD4Backend(base.Backend):
|
||||
self.pattern = self.options.get('pattern', 'slave')
|
||||
|
||||
def _command(self, command):
|
||||
sock = eventlet.wrap_ssl(
|
||||
eventlet.connect((self.host, self.port)),
|
||||
keyfile=self.keyfile,
|
||||
certfile=self.certfile)
|
||||
stream = sock.makefile()
|
||||
stream.write('%s %s\n' % (self.NSDCT_VERSION, command))
|
||||
stream.flush()
|
||||
result = stream.read()
|
||||
stream.close()
|
||||
sock.close()
|
||||
return result
|
||||
sock = eventlet.wrap_ssl(
|
||||
eventlet.connect((self.host, self.port)),
|
||||
keyfile=self.keyfile,
|
||||
certfile=self.certfile)
|
||||
stream = sock.makefile()
|
||||
stream.write('%s %s\n' % (self.NSDCT_VERSION, command))
|
||||
stream.flush()
|
||||
result = stream.read()
|
||||
stream.close()
|
||||
sock.close()
|
||||
return result
|
||||
|
||||
def _execute_nsd4(self, command):
|
||||
try:
|
||||
|
@ -50,7 +50,7 @@ class PDNS4Backend(base.Backend):
|
||||
self._build_url(zone=zone.name),
|
||||
headers=self.headers,
|
||||
)
|
||||
return zone.status_code is 200
|
||||
return zone.status_code == 200
|
||||
|
||||
def create_zone(self, context, zone):
|
||||
"""Create a DNS zone"""
|
||||
|
@ -14,6 +14,7 @@
|
||||
# under the License.
|
||||
import re
|
||||
|
||||
from hacking import core
|
||||
import pycodestyle
|
||||
|
||||
# D701: Default parameter value is a mutable type
|
||||
@ -34,17 +35,18 @@ mutable_default_argument_check = re.compile(
|
||||
string_translation = re.compile(r"[^_]*_\(\s*('|\")")
|
||||
translated_log = re.compile(
|
||||
r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)"
|
||||
"\(\s*_\(\s*('|\")")
|
||||
r"\(\s*_\(\s*('|\")")
|
||||
underscore_import_check = re.compile(r"(.)*import _(.)*")
|
||||
# We need this for cases where they have created their own _ function.
|
||||
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
|
||||
graduated_oslo_libraries_import_re = re.compile(
|
||||
r"^\s*(?:import|from) designate\.openstack\.common\.?.*?"
|
||||
"(gettextutils|rpc)"
|
||||
".*?")
|
||||
r"(gettextutils|rpc)"
|
||||
r".*?")
|
||||
|
||||
|
||||
def mutable_default_arguments(logical_line, physical_line, filename):
|
||||
@core.flake8ext
|
||||
def mutable_default_arguments(physical_line, logical_line, filename):
|
||||
if pycodestyle.noqa(physical_line):
|
||||
return
|
||||
|
||||
@ -52,6 +54,7 @@ def mutable_default_arguments(logical_line, physical_line, filename):
|
||||
yield (0, "D701: Default parameter value is a mutable type")
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
def no_translate_debug_logs(logical_line, filename):
|
||||
"""Check for 'LOG.debug(_('
|
||||
As per our translation policy,
|
||||
@ -66,6 +69,7 @@ def no_translate_debug_logs(logical_line, filename):
|
||||
yield(0, "D706: Don't translate debug level logs")
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
def check_explicit_underscore_import(logical_line, filename):
|
||||
"""Check for explicit import of the _ function
|
||||
|
||||
@ -86,6 +90,7 @@ def check_explicit_underscore_import(logical_line, filename):
|
||||
yield(0, "D703: Found use of _() without explicit import of _!")
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
def no_import_graduated_oslo_libraries(logical_line, filename):
|
||||
"""Check that we don't continue to use o.c. oslo libraries after graduation
|
||||
|
||||
@ -105,6 +110,7 @@ def no_import_graduated_oslo_libraries(logical_line, filename):
|
||||
"graduated!" % matches.group(1))
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
def use_timeutils_utcnow(logical_line, filename):
|
||||
# tools are OK to use the standard datetime module
|
||||
if "/tools/" in filename:
|
||||
@ -119,6 +125,7 @@ def use_timeutils_utcnow(logical_line, filename):
|
||||
yield (pos, msg % f)
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
def check_no_basestring(logical_line):
|
||||
if re.search(r"\bbasestring\b", logical_line):
|
||||
msg = ("D707: basestring is not Python3-compatible, use "
|
||||
@ -126,12 +133,14 @@ def check_no_basestring(logical_line):
|
||||
yield(0, msg)
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
def check_python3_xrange(logical_line):
|
||||
if re.search(r"\bxrange\s*\(", logical_line):
|
||||
yield(0, "D708: Do not use xrange. Use range, or six.moves.range for "
|
||||
"large loops.")
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
def check_no_log_audit(logical_line):
|
||||
"""Ensure that we are not using LOG.audit messages
|
||||
Plans are in place going forward as discussed in the following
|
||||
@ -141,14 +150,3 @@ def check_no_log_audit(logical_line):
|
||||
"""
|
||||
if "LOG.audit(" in logical_line:
|
||||
yield(0, "D709: LOG.audit is deprecated, please use LOG.info!")
|
||||
|
||||
|
||||
def factory(register):
|
||||
register(mutable_default_arguments)
|
||||
register(no_translate_debug_logs)
|
||||
register(check_explicit_underscore_import)
|
||||
register(no_import_graduated_oslo_libraries)
|
||||
register(use_timeutils_utcnow)
|
||||
register(check_no_basestring)
|
||||
register(check_python3_xrange)
|
||||
register(check_no_log_audit)
|
||||
|
@ -52,6 +52,7 @@ class HookLog(object):
|
||||
if name in self.LVLS:
|
||||
return functools.partial(self.capture, self.LVLS[name])
|
||||
|
||||
|
||||
LOG = HookLog()
|
||||
|
||||
|
||||
|
@ -20,7 +20,8 @@ from designate import objects
|
||||
class NotSpecifiedSential:
|
||||
pass
|
||||
|
||||
REQUIRED_RE = re.compile("\'([\w]*)\' is a required property")
|
||||
|
||||
REQUIRED_RE = re.compile(r"\'([\w]*)\' is a required property")
|
||||
|
||||
|
||||
class ValidationErrorAPIv2Adapter(base.APIv2Adapter):
|
||||
|
@ -36,7 +36,7 @@ class ZoneMasterAPIv2Adapter(base.APIv2Adapter):
|
||||
|
||||
@classmethod
|
||||
def _render_object(cls, object, *arg, **kwargs):
|
||||
if object.port is 53:
|
||||
if object.port == 53:
|
||||
return object.host
|
||||
else:
|
||||
return "%(host)s:%(port)d" % object.to_dict()
|
||||
|
@ -288,7 +288,7 @@ class DesignateObject(base.VersionedObject):
|
||||
else:
|
||||
try:
|
||||
field.coerce(self, name, value) # Check value
|
||||
except Exception as e:
|
||||
except Exception:
|
||||
raise exceptions.InvalidObject(
|
||||
"{} is invalid".format(name))
|
||||
elif not field.nullable:
|
||||
|
@ -121,7 +121,7 @@ class RecordSet(base.DesignateObject, base.DictObjectMixin,
|
||||
try:
|
||||
record_list_cls = self.obj_cls_from_name('%sList' % self.type)
|
||||
record_cls = self.obj_cls_from_name(self.type)
|
||||
except (KeyError, ovo_exc.UnsupportedObjectError) as e:
|
||||
except (KeyError, ovo_exc.UnsupportedObjectError):
|
||||
err_msg = ("'%(type)s' is not a valid record type"
|
||||
% {'type': self.type})
|
||||
self._validate_fail(errors, err_msg)
|
||||
|
@ -61,7 +61,7 @@ class Scheduler(object):
|
||||
"""
|
||||
pools = self.storage.find_pools(context)
|
||||
|
||||
if len(self.filters) is 0:
|
||||
if len(self.filters) == 0:
|
||||
raise exceptions.NoFiltersConfigured('There are no scheduling '
|
||||
'filters configured')
|
||||
|
||||
@ -75,7 +75,7 @@ class Scheduler(object):
|
||||
|
||||
if len(pools) > 1:
|
||||
raise exceptions.MultiplePoolsFound()
|
||||
if len(pools) is 0:
|
||||
if len(pools) == 0:
|
||||
raise exceptions.NoValidPoolFound('There are no pools that '
|
||||
'matched your request')
|
||||
return pools[0].id
|
||||
|
@ -40,7 +40,7 @@ class FallbackFilter(base.Filter):
|
||||
"""
|
||||
|
||||
def filter(self, context, pools, zone):
|
||||
if len(pools) is 0:
|
||||
if len(pools) == 0:
|
||||
pools = objects.PoolList()
|
||||
pools.append(
|
||||
objects.Pool(id=cfg.CONF['service:central'].default_pool_id))
|
||||
|
@ -108,7 +108,7 @@ class Schema(object):
|
||||
schema = schema['properties']
|
||||
|
||||
with self.resolver.resolving(schema['$ref']) as ischema:
|
||||
schema = ischema
|
||||
schema = ischema
|
||||
|
||||
return [self.filter(i, schema) for i in instance]
|
||||
|
||||
|
@ -46,25 +46,25 @@ def type_draft3(validator, types, instance, schema):
|
||||
|
||||
|
||||
def oneOf_draft3(validator, oneOf, instance, schema):
|
||||
# Backported from Draft4 to Draft3
|
||||
subschemas = iter(oneOf)
|
||||
first_valid = next(
|
||||
(s for s in subschemas if validator.is_valid(instance, s)), None,
|
||||
)
|
||||
# Backported from Draft4 to Draft3
|
||||
subschemas = iter(oneOf)
|
||||
first_valid = next(
|
||||
(s for s in subschemas if validator.is_valid(instance, s)), None,
|
||||
)
|
||||
|
||||
if first_valid is None:
|
||||
if first_valid is None:
|
||||
yield jsonschema.ValidationError(
|
||||
"%r is not valid under any of the given schemas." % (instance,)
|
||||
)
|
||||
else:
|
||||
more_valid = [s for s in subschemas
|
||||
if validator.is_valid(instance, s)]
|
||||
if more_valid:
|
||||
more_valid.append(first_valid)
|
||||
reprs = ", ".join(repr(schema) for schema in more_valid)
|
||||
yield jsonschema.ValidationError(
|
||||
"%r is not valid under any of the given schemas." % (instance,)
|
||||
"%r is valid under each of %s" % (instance, reprs)
|
||||
)
|
||||
else:
|
||||
more_valid = [s for s in subschemas
|
||||
if validator.is_valid(instance, s)]
|
||||
if more_valid:
|
||||
more_valid.append(first_valid)
|
||||
reprs = ", ".join(repr(schema) for schema in more_valid)
|
||||
yield jsonschema.ValidationError(
|
||||
"%r is valid under each of %s" % (instance, reprs)
|
||||
)
|
||||
|
||||
|
||||
def type_draft4(validator, types, instance, schema):
|
||||
|
@ -49,18 +49,18 @@ def _set_object_from_model(obj, model, **extra):
|
||||
|
||||
|
||||
def _set_listobject_from_models(obj, models, map_=None):
|
||||
for model in models:
|
||||
extra = {}
|
||||
for model in models:
|
||||
extra = {}
|
||||
|
||||
if map_ is not None:
|
||||
extra = map_(model)
|
||||
if map_ is not None:
|
||||
extra = map_(model)
|
||||
|
||||
obj.objects.append(
|
||||
_set_object_from_model(obj.LIST_ITEM_TYPE(), model, **extra))
|
||||
obj.objects.append(
|
||||
_set_object_from_model(obj.LIST_ITEM_TYPE(), model, **extra))
|
||||
|
||||
obj.obj_reset_changes()
|
||||
obj.obj_reset_changes()
|
||||
|
||||
return obj
|
||||
return obj
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
|
@ -1798,5 +1798,5 @@ class SQLAlchemyStorage(sqlalchemy_base.SQLAlchemy, storage_base.Storage):
|
||||
def _rname_check(self, criterion):
|
||||
# If the criterion has 'name' in it, switch it out for reverse_name
|
||||
if criterion is not None and criterion.get('name', "").startswith('*'):
|
||||
criterion['reverse_name'] = criterion.pop('name')[::-1]
|
||||
criterion['reverse_name'] = criterion.pop('name')[::-1]
|
||||
return criterion
|
||||
|
@ -248,8 +248,8 @@ class CentralServiceTest(CentralTestCase):
|
||||
values['ttl'] = 0
|
||||
|
||||
with testtools.ExpectedException(exceptions.InvalidTTL):
|
||||
self.central_service._is_valid_ttl(
|
||||
context, values['ttl'])
|
||||
self.central_service._is_valid_ttl(
|
||||
context, values['ttl'])
|
||||
|
||||
# TLD Tests
|
||||
def test_create_tld(self):
|
||||
@ -721,8 +721,8 @@ class CentralServiceTest(CentralTestCase):
|
||||
values['ttl'] = 0
|
||||
|
||||
with testtools.ExpectedException(ValueError):
|
||||
self.central_service.create_zone(
|
||||
context, objects.Zone.from_dict(values))
|
||||
self.central_service.create_zone(
|
||||
context, objects.Zone.from_dict(values))
|
||||
|
||||
def test_create_zone_below_zero_ttl(self):
|
||||
self.policy({'use_low_ttl': '!'})
|
||||
|
@ -148,15 +148,15 @@ class NovaFixedHandlerTest(TestCase, NotificationHandlerMixin):
|
||||
fixture = self.get_notification_fixture('nova', event_type)
|
||||
with mock.patch.object(
|
||||
self.plugin, '_create_or_update_recordset') as finder:
|
||||
with mock.patch.object(self.plugin.central_api,
|
||||
'create_record'):
|
||||
finder.return_value = {'id': 'fakeid'}
|
||||
self.plugin.process_notification(
|
||||
self.admin_context.to_dict(),
|
||||
event_type, fixture['payload'])
|
||||
finder.assert_called_once_with(
|
||||
mock.ANY, mock.ANY, type='A', zone_id=self.zone_id,
|
||||
name='private.example.com.')
|
||||
with mock.patch.object(self.plugin.central_api,
|
||||
'create_record'):
|
||||
finder.return_value = {'id': 'fakeid'}
|
||||
self.plugin.process_notification(
|
||||
self.admin_context.to_dict(),
|
||||
event_type, fixture['payload'])
|
||||
finder.assert_called_once_with(
|
||||
mock.ANY, mock.ANY, type='A', zone_id=self.zone_id,
|
||||
name='private.example.com.')
|
||||
|
||||
def test_formatv4(self):
|
||||
event_type = 'compute.instance.create.end'
|
||||
@ -165,15 +165,15 @@ class NovaFixedHandlerTest(TestCase, NotificationHandlerMixin):
|
||||
fixture = self.get_notification_fixture('nova', event_type)
|
||||
with mock.patch.object(
|
||||
self.plugin, '_create_or_update_recordset') as finder:
|
||||
with mock.patch.object(self.plugin.central_api,
|
||||
'create_record'):
|
||||
finder.return_value = {'id': 'fakeid'}
|
||||
self.plugin.process_notification(
|
||||
self.admin_context.to_dict(),
|
||||
event_type, fixture['payload'])
|
||||
finder.assert_called_once_with(
|
||||
mock.ANY, mock.ANY, type='A', zone_id=self.zone_id,
|
||||
name='private-v4.example.com.')
|
||||
with mock.patch.object(self.plugin.central_api,
|
||||
'create_record'):
|
||||
finder.return_value = {'id': 'fakeid'}
|
||||
self.plugin.process_notification(
|
||||
self.admin_context.to_dict(),
|
||||
event_type, fixture['payload'])
|
||||
finder.assert_called_once_with(
|
||||
mock.ANY, mock.ANY, type='A', zone_id=self.zone_id,
|
||||
name='private-v4.example.com.')
|
||||
|
||||
def test_formatv6(self):
|
||||
event_type = 'compute.instance.create.end'
|
||||
@ -182,12 +182,12 @@ class NovaFixedHandlerTest(TestCase, NotificationHandlerMixin):
|
||||
fixture = self.get_notification_fixture('nova', event_type)
|
||||
with mock.patch.object(
|
||||
self.plugin, '_create_or_update_recordset') as finder:
|
||||
with mock.patch.object(self.plugin.central_api,
|
||||
'create_record'):
|
||||
finder.return_value = {'id': 'fakeid'}
|
||||
self.plugin.process_notification(
|
||||
self.admin_context.to_dict(),
|
||||
event_type, fixture['payload_v6'])
|
||||
finder.assert_called_once_with(
|
||||
mock.ANY, mock.ANY, type='AAAA', zone_id=self.zone_id,
|
||||
name='private-v6.example.com.')
|
||||
with mock.patch.object(self.plugin.central_api,
|
||||
'create_record'):
|
||||
finder.return_value = {'id': 'fakeid'}
|
||||
self.plugin.process_notification(
|
||||
self.admin_context.to_dict(),
|
||||
event_type, fixture['payload_v6'])
|
||||
finder.assert_called_once_with(
|
||||
mock.ANY, mock.ANY, type='AAAA', zone_id=self.zone_id,
|
||||
name='private-v6.example.com.')
|
||||
|
@ -67,18 +67,18 @@ class NSD4BackendTestCase(designate.tests.TestCase):
|
||||
mock_connect.return_value = mock.sentinel.client
|
||||
mock_ssl.return_value = sock
|
||||
sock.makefile.return_value = stream
|
||||
if command_context is 'create_fail':
|
||||
if command_context == 'create_fail':
|
||||
stream.read.return_value = 'goat'
|
||||
else:
|
||||
stream.read.return_value = 'ok'
|
||||
|
||||
if command_context is 'create':
|
||||
if command_context == 'create':
|
||||
self.backend.create_zone(self.context, self.zone)
|
||||
command = 'NSDCT1 addzone %s test-pattern\n' % self.zone.name
|
||||
elif command_context is 'delete':
|
||||
elif command_context == 'delete':
|
||||
self.backend.delete_zone(self.context, self.zone)
|
||||
command = 'NSDCT1 delzone %s\n' % self.zone.name
|
||||
elif command_context is 'create_fail':
|
||||
elif command_context == 'create_fail':
|
||||
self.assertRaises(exceptions.Backend,
|
||||
self.backend.create_zone,
|
||||
self.context, self.zone)
|
||||
|
@ -198,8 +198,8 @@ class MockRecord(object):
|
||||
class MockPool(object):
|
||||
ns_records = [MockRecord(), ]
|
||||
|
||||
# Fixtures
|
||||
|
||||
# Fixtures
|
||||
fx_mdns_api = fixtures.MockPatch('designate.central.service.mdns_rpcapi')
|
||||
|
||||
mdns_api = mock.PropertyMock(
|
||||
|
@ -243,7 +243,7 @@ class TestDoAfxr(oslotest.base.BaseTestCase):
|
||||
|
||||
self.assertRaisesRegex(
|
||||
exceptions.XFRFailure,
|
||||
'XFR failed for example.com. No servers in \[\] was reached.',
|
||||
r'XFR failed for example.com. No servers in \[\] was reached.',
|
||||
dnsutils.do_axfr, 'example.com', masters,
|
||||
)
|
||||
|
||||
|
@ -158,7 +158,7 @@ def deep_dict_merge(a, b):
|
||||
|
||||
for k, v in b.items():
|
||||
if k in result and isinstance(result[k], dict):
|
||||
result[k] = deep_dict_merge(result[k], v)
|
||||
result[k] = deep_dict_merge(result[k], v)
|
||||
else:
|
||||
result[k] = copy.deepcopy(v)
|
||||
|
||||
|
@ -57,5 +57,6 @@ def main():
|
||||
)
|
||||
server.serve_forever()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
|
@ -3,7 +3,7 @@
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
# Hacking already pins down pep8, pyflakes and flake8
|
||||
hacking>=1.1.0,<1.2.0 # Apache-2.0
|
||||
hacking>=3.0,<3.1.0 # Apache-2.0
|
||||
coverage!=4.4,>=4.0 # Apache-2.0
|
||||
fixtures>=3.0.0 # Apache-2.0/BSD
|
||||
mock>=2.0.0 # BSD
|
||||
|
17
tox.ini
17
tox.ini
@ -173,14 +173,25 @@ ignore-path = .venv,.git,.tox,*designate/locale*,*lib/python*,*designate.egg*,ap
|
||||
# H904 Wrap long lines in parentheses instead of a backslash
|
||||
# E126 continuation line over-indented for hanging indent
|
||||
# E128 continuation line under-indented for visual indent
|
||||
|
||||
ignore = H105,H302,H306,H238,H402,H404,H405,H501,H904,E126,E128
|
||||
# W504 line break after binary operator
|
||||
ignore = H105,H302,H306,H238,H402,H404,H405,H501,H904,E126,E128,W504
|
||||
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,tools,.ropeproject
|
||||
|
||||
[hacking]
|
||||
local-check-factory = designate.hacking.checks.factory
|
||||
import_exceptions = designate.i18n
|
||||
|
||||
[flake8:local-plugins]
|
||||
extension =
|
||||
D701 = checks:mutable_default_arguments
|
||||
D703 = checks:check_explicit_underscore_import
|
||||
D704 = checks:no_import_graduated_oslo_libraries
|
||||
D705 = checks:use_timeutils_utcnow
|
||||
D706 = checks:no_translate_debug_logs
|
||||
D707 = checks:check_no_basestring
|
||||
D708 = checks:check_python3_xrange
|
||||
D709 = checks:check_no_log_audit
|
||||
paths = ./designate/hacking
|
||||
|
||||
[testenv:lower-constraints]
|
||||
deps =
|
||||
-c{toxinidir}/lower-constraints.txt
|
||||
|
Loading…
x
Reference in New Issue
Block a user