
Commit

cleanup: remove six
tobias-urdin committed Aug 3, 2023
1 parent a03e89f · commit 84dd1dd
Showing 41 changed files with 280 additions and 348 deletions.
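
Every hunk below follows the same pattern: a six compatibility shim is replaced by the Python 3 builtin or stdlib equivalent. A rough recap of the substitutions, as an illustrative snippet rather than code taken from the diff:

    # Illustrative summary of the six -> Python 3 replacements made in this commit.
    d = {"a": 1}
    assert list(d.items()) == [("a", 1)]   # six.iteritems(d)      -> d.items()
    assert list(range(3)) == [0, 1, 2]     # six.moves.range       -> range
    assert str(42) == "42"                 # six.text_type         -> str
    assert zip([1], [2])                   # six.moves.zip         -> zip (lazy)
    assert b"cab"[0] == ord("c")           # six.indexbytes(b, 0)  -> b[0]
    from urllib.parse import quote         # six.moves.urllib      -> urllib
    from io import StringIO                # six.moves.cStringIO   -> io.StringIO
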
3 changes: 1 addition & 2 deletions gnocchi/amqp1d.py
@@ -19,7 +19,6 @@
import daiquiri
import proton.handlers
import proton.reactor
import six
import ujson

from gnocchi import incoming
@@ -61,7 +60,7 @@ def flush(self):
def _flush(self):
archive_policies = {}
resources = self._get_resources(self._measures.keys())
for host_id, measures_by_names in six.iteritems(self._measures):
for host_id, measures_by_names in self._measures.items():
resource = resources[host_id]

names = set(measures_by_names.keys())
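
For context, dict.items() on Python 3 returns a lazy view of (key, value) pairs, which is exactly what six.iteritems() emulated, so the loop above keeps its behaviour. A minimal sketch with made-up data:

    measures = {"host-1": {"cpu.util": [0.5]}, "host-2": {"mem.used": [42]}}
    # items() is a view: it iterates lazily and reflects the dict it wraps,
    # just like six.iteritems() did on Python 2.
    for host_id, measures_by_names in measures.items():
        names = set(measures_by_names.keys())
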
7 changes: 3 additions & 4 deletions gnocchi/archive_policy.py
@@ -21,7 +21,6 @@
import numpy
from oslo_config import cfg
from oslo_config import types
import six

from gnocchi import carbonara
from gnocchi import utils
@@ -40,7 +39,7 @@ class ArchivePolicy(object):
VALID_AGGREGATION_METHODS = set(
('mean', 'sum', 'last', 'max', 'min',
'std', 'median', 'first', 'count')).union(
set((str(i) + 'pct' for i in six.moves.range(1, 100))))
set((str(i) + 'pct' for i in range(1, 100))))

VALID_AGGREGATION_METHODS = VALID_AGGREGATION_METHODS.union(
set(map(lambda s: "rate:" + s,
@@ -240,13 +239,13 @@ def timespan(self):
def jsonify(self):
"""Return a dict representation with human readable values."""
return {
'timespan': six.text_type(
'timespan': str(
datetime.timedelta(
seconds=utils.timespan_total_seconds(
self.timespan)))
if self.timespan is not None
else None,
'granularity': six.text_type(
'granularity': str(
datetime.timedelta(
seconds=utils.timespan_total_seconds(
self.granularity))),
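
Both replacements in archive_policy.py are behaviour-preserving: range(1, 100) still generates the 1pct..99pct aggregation method names, and str() renders a datetime.timedelta in the same human-readable form six.text_type produced. A quick check (values illustrative):

    import datetime

    # The percentile aggregation method names, '1pct' through '99pct'.
    pct_methods = {str(i) + 'pct' for i in range(1, 100)}
    assert '50pct' in pct_methods and len(pct_methods) == 99

    # str() on a timedelta gives the human-readable form jsonify() exposes.
    assert str(datetime.timedelta(seconds=3600)) == '1:00:00'
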
57 changes: 28 additions & 29 deletions gnocchi/carbonara.py
@@ -27,7 +27,6 @@

import lz4.block
import numpy
import six

from gnocchi import calendar

@@ -232,7 +231,7 @@ def __init__(self, ts=None):
self.ts = ts

def __iter__(self):
return six.moves.zip(self.ts['timestamps'], self.ts['values'])
return zip(self.ts['timestamps'], self.ts['values'])

@classmethod
def from_data(cls, timestamps=None, values=None):
@@ -411,34 +410,34 @@ def benchmark(cls):
now = numpy.datetime64("2015-04-03 23:11")
timestamps = numpy.sort(numpy.array(
[now + numpy.timedelta64(random.randint(1000000, 10000000), 'us')
for i in six.moves.range(points)]))
for i in range(points)]))

print(cls.__name__)
print("=" * len(cls.__name__))

for title, values in [
("Simple continuous range", six.moves.range(points)),
("Simple continuous range", range(points)),
("All 0", [float(0)] * points),
("All 1", [float(1)] * points),
("0 and 1", [0, 1] * (points // 2)),
("1 and 0 random",
[random.randint(0, 1)
for x in six.moves.range(points)]),
for x in range(points)]),
("Small number random pos/neg",
[random.randint(-100000, 10000)
for x in six.moves.range(points)]),
for x in range(points)]),
("Small number random pos",
[random.randint(0, 20000) for x in six.moves.range(points)]),
[random.randint(0, 20000) for x in range(points)]),
("Small number random neg",
[random.randint(-20000, 0) for x in six.moves.range(points)]),
("Sin(x)", list(map(math.sin, six.moves.range(points)))),
[random.randint(-20000, 0) for x in range(points)]),
("Sin(x)", list(map(math.sin, range(points)))),
("random ", [random.random()
for x in six.moves.range(points)]),
for x in range(points)]),
]:
print(title)
ts = cls.from_data(timestamps, values)
t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
s = ts.serialize()
t1 = time.time()
print(" Serialization speed: %.2f MB/s"
@@ -447,7 +446,7 @@ def benchmark(cls):
print(" Bytes per point: %.2f" % (len(s) / float(points)))

t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
cls.unserialize(s, ONE_SECOND, 1)
t1 = time.time()
print(" Unserialization speed: %.2f MB/s"
@@ -637,7 +636,7 @@ def split(self):
round_timestamp(self.timestamps, freq),
return_counts=True)
start = 0
for key, count in six.moves.zip(keys, counts):
for key, count in zip(keys, counts):
end = start + count
yield (SplitKey(key, self.aggregation.granularity),
AggregatedTimeSerie(self.aggregation, self[start:end]))
@@ -680,7 +679,7 @@ def __repr__(self):
@staticmethod
def is_compressed(serialized_data):
"""Check whatever the data was serialized with compression."""
return six.indexbytes(serialized_data, 0) == ord("c")
return serialized_data[0] == ord("c")

@classmethod
def unserialize(cls, data, key, aggregation):
@@ -813,37 +812,37 @@ def benchmark(cls):
now = numpy.datetime64("2015-04-03 23:11")
timestamps = numpy.sort(numpy.array(
[now + i * sampling
for i in six.moves.range(points)]))
for i in range(points)]))

print(cls.__name__)
print("=" * len(cls.__name__))

for title, values in [
("Simple continuous range", six.moves.range(points)),
("Simple continuous range", range(points)),
("All 0", [float(0)] * points),
("All 1", [float(1)] * points),
("0 and 1", [0, 1] * (points // 2)),
("1 and 0 random",
[random.randint(0, 1)
for x in six.moves.range(points)]),
for x in range(points)]),
("Small number random pos/neg",
[random.randint(-100000, 10000)
for x in six.moves.range(points)]),
for x in range(points)]),
("Small number random pos",
[random.randint(0, 20000) for x in six.moves.range(points)]),
[random.randint(0, 20000) for x in range(points)]),
("Small number random neg",
[random.randint(-20000, 0) for x in six.moves.range(points)]),
("Sin(x)", list(map(math.sin, six.moves.range(points)))),
[random.randint(-20000, 0) for x in range(points)]),
("Sin(x)", list(map(math.sin, range(points)))),
("random ", [random.random()
for x in six.moves.range(points)]),
for x in range(points)]),
]:
print(title)
serialize_times = 50
aggregation = Aggregation("mean", sampling, None)
ts = cls.from_data(aggregation, timestamps, values)
t0 = time.time()
key = ts.get_split_key()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
e, s = ts.serialize(key, compressed=False)
t1 = time.time()
print(" Uncompressed serialization speed: %.2f MB/s"
@@ -852,15 +851,15 @@ def benchmark(cls):
print(" Bytes per point: %.2f" % (len(s) / float(points)))

t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
cls.unserialize(s, key, 'mean')
t1 = time.time()
print(" Unserialization speed: %.2f MB/s"
% (((points * 2 * 8)
/ ((t1 - t0) / serialize_times)) / (1024.0 * 1024.0)))

t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
o, s = ts.serialize(key, compressed=True)
t1 = time.time()
print(" Compressed serialization speed: %.2f MB/s"
@@ -869,7 +868,7 @@ def benchmark(cls):
print(" Bytes per point: %.2f" % (len(s) / float(points)))

t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
cls.unserialize(s, key, 'mean')
t1 = time.time()
print(" Uncompression speed: %.2f MB/s"
Expand All @@ -880,7 +879,7 @@ def per_sec(t1, t0):
return 1 / ((t1 - t0) / serialize_times)

t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
list(ts.split())
t1 = time.time()
print(" split() speed: %.2f Hz" % per_sec(t1, t0))
@@ -894,7 +893,7 @@ def per_sec(t1, t0):
)

t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
ts.merge(tsbis)
t1 = time.time()
print(" merge() speed %.2f Hz" % per_sec(t1, t0))
@@ -904,7 +903,7 @@ def per_sec(t1, t0):
serialize_times = 3 if agg.endswith('pct') else 10
ts = cls(ts=pts, aggregation=aggregation)
t0 = time.time()
for i in six.moves.range(serialize_times):
for i in range(serialize_times):
ts.resample(resample)
t1 = time.time()
print(" resample(%s) speed: %.2f Hz"
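
Two details behind the carbonara.py hunks: on Python 3, indexing a bytes object already yields an int (what six.indexbytes provided), and the builtin zip is a lazy iterator like six.moves.zip. A small check with a made-up payload, not the real serialization format:

    data = b"c" + b"\x00" * 4   # illustrative payload only
    # bytes indexing returns an int, so data[0] == ord("c") is the same test
    # six.indexbytes(data, 0) == ord("c") used to perform.
    assert data[0] == ord("c")

    # zip() is lazy on Python 3, matching the removed six.moves.zip.
    pairs = zip(["2015-04-03"], [23.11])
    assert next(pairs) == ("2015-04-03", 23.11)
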
3 changes: 1 addition & 2 deletions gnocchi/chef.py
@@ -17,7 +17,6 @@
import hashlib

import daiquiri
import six

from gnocchi import indexer

@@ -124,7 +123,7 @@ def refresh_metrics(self, metrics, timeout=None, sync=False):
self.storage.add_measures_to_metrics({
metrics_by_id[metric_id]: measures
for metric_id, measures
in six.iteritems(metrics_and_measures)
in metrics_and_measures.items()
})
LOG.debug("Measures for %d metrics processed",
len(metric_ids))
3 changes: 1 addition & 2 deletions gnocchi/cli/manage.py
@@ -20,7 +20,6 @@
import daiquiri
from oslo_config import cfg
from oslo_config import generator
import six

from gnocchi import archive_policy
from gnocchi import incoming
@@ -81,7 +80,7 @@ def upgrade():
and not index.list_archive_policy_rules()):
if conf.skip_index:
index = indexer.get_driver(conf)
for name, ap in six.iteritems(archive_policy.DEFAULT_ARCHIVE_POLICIES):
for name, ap in archive_policy.DEFAULT_ARCHIVE_POLICIES.items():
index.create_archive_policy(ap)
index.create_archive_policy_rule("default", "*", "low")

5 changes: 2 additions & 3 deletions gnocchi/common/redis.py
@@ -17,8 +17,7 @@
from __future__ import absolute_import

from oslo_config import cfg
import six
from six.moves.urllib import parse
from urllib import parse

try:
import redis
@@ -173,7 +172,7 @@ def get_client(conf, scripts=None):
if scripts is not None:
scripts = {
name: client.register_script(code)
for name, code in six.iteritems(scripts)
for name, code in scripts.items()
}

return client, scripts
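
six.moves.urllib was only an import alias; on Python 3 the same helpers live in urllib.parse, so `from urllib import parse` is a drop-in change. For example (URL purely illustrative):

    from urllib import parse

    parsed = parse.urlparse("redis://localhost:6379?db=2")   # illustrative URL
    assert parsed.port == 6379
    assert parse.parse_qs(parsed.query) == {"db": ["2"]}
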
2 changes: 1 addition & 1 deletion gnocchi/common/swift.py
@@ -12,7 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import daiquiri
from six.moves.urllib.parse import quote
from urllib.parse import quote

try:
from swiftclient import client as swclient
20 changes: 5 additions & 15 deletions gnocchi/gendoc.py
@@ -14,6 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import io
import json
import os
import subprocess
@@ -22,8 +23,6 @@

import jinja2
from oslo_config import generator
import six
import six.moves
from sphinx.util import logging
import webob.request
import yaml
@@ -49,10 +48,7 @@ def _extract_body(req_or_resp):
if not req_or_resp.text:
return ""

if six.PY2:
body = req_or_resp.body
else:
body = req_or_resp.text
body = req_or_resp.text
if req_or_resp.content_type.startswith("application/json"):
body = _format_json(body)
return "\n ".join(body.split("\n"))
@@ -61,7 +57,7 @@
def _format_headers(headers):
return "\n".join(
" %s: %s" % (k, v)
for k, v in six.iteritems(headers))
for k, v in headers.items())


def _response_to_httpdomain(response):
@@ -209,10 +205,8 @@ def setup(app):
scenarios=scenarios)

template = jinja2.Template(entry['request'])
fake_file = six.moves.cStringIO()
fake_file = io.StringIO()
content = template.render(scenarios=scenarios)
if six.PY2:
content = content.encode('utf-8')
fake_file.write(content)
fake_file.seek(0)
request = webapp.RequestClass.from_file(fake_file)
@@ -228,7 +222,7 @@ def setup(app):
request.body = fake_file.read(clen)

LOG.info("Doing request %s: %s",
entry['name'], six.text_type(request))
entry['name'], str(request))
with webapp.use_admin_user():
response = webapp.request(request)
entry['response'] = response
@@ -238,13 +232,9 @@ def setup(app):
test.tearDownClass()
with open("doc/source/rest.j2", "r") as f:
content = f.read()
if six.PY2:
content = content.decode("utf-8")
template = jinja2.Template(content)
with open("doc/source/rest.rst", "w") as f:
content = template.render(scenarios=scenarios)
if six.PY2:
content = content.encode("utf-8")
f.write(content)

config_output_file = 'doc/source/gnocchi.conf.sample'
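
With the Python 2 paths gone, the rendered Jinja template is always text, so a plain io.StringIO replaces six.moves.cStringIO and the PY2 encode/decode branches simply disappear. A minimal sketch of the pattern (the template string is made up):

    import io
    import jinja2

    content = jinja2.Template("GET /v1/resource/{{ kind }} HTTP/1.1").render(kind="generic")
    fake_file = io.StringIO()   # text buffer; no byte-encoding step needed on Python 3
    fake_file.write(content)
    fake_file.seek(0)
    assert fake_file.read().startswith("GET /v1/resource/generic")
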
5 changes: 2 additions & 3 deletions gnocchi/incoming/__init__.py
@@ -21,7 +21,6 @@

import daiquiri
import numpy
import six

from gnocchi.carbonara import TIMESERIES_ARRAY_DTYPE
from gnocchi import exceptions
@@ -194,7 +193,7 @@ def add_measures_batch(self, metrics_and_measures):
self.MAP_METHOD(self._store_new_measures,
((metric_id, self._encode_measures(measures))
for metric_id, measures
in six.iteritems(metrics_and_measures)))
in metrics_and_measures.items()))

@staticmethod
def _store_new_measures(metric_id, data):
@@ -245,7 +244,7 @@ def sack_for_metric(self, metric_id):
return self._make_sack(metric_id.int % self.NUM_SACKS)

def iter_sacks(self):
return (self._make_sack(i) for i in six.moves.range(self.NUM_SACKS))
return (self._make_sack(i) for i in range(self.NUM_SACKS))

@staticmethod
def iter_on_sacks_to_process():
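
range() on Python 3 is lazy, like the xrange that backed six.moves.range, so the generator expression in iter_sacks() still builds nothing up front. Illustration with a made-up sack representation:

    NUM_SACKS = 8   # illustrative value; the real count comes from the incoming driver

    def make_sack(i):
        return "sack-%d" % i   # hypothetical stand-in for the driver's _make_sack()

    sacks = (make_sack(i) for i in range(NUM_SACKS))   # lazy: nothing is created yet
    assert next(sacks) == "sack-0"
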
