Skip to content

Commit

Permalink
Revert "Address high IOPs usage of the Gnocchi Ceph pool"
Browse files — browse the repository at this point in the history
This reverts commit 82d951d.

Conflicts:
	gnocchi/storage/ceph.py
Branch information:
tobias-urdin committed on Oct 7, 2024
1 parent a537a58 commit b8feb23
Showing 1 changed file with 6 additions and 42 deletions.
48 changes: 6 additions & 42 deletions gnocchi/storage/ceph.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
# License for the specific language governing permissions and limitations
# under the License.
import collections
import daiquiri

from oslo_config import cfg

Expand Down Expand Up @@ -43,11 +42,6 @@

rados = ceph.rados

LOG = daiquiri.getLogger(__name__)

DEFAULT_RADOS_BUFFER_SIZE = 8192
MAP_UNAGGREGATED_METRIC_NAME_BY_SIZE = {}


class CephStorage(storage.StorageDriver):
WRITE_FULL = False
Expand Down Expand Up @@ -94,13 +88,6 @@ def _store_metric_splits(self, metrics_keys_aggregations_data_offset,
for key, agg, data, offset in keys_aggregations_data_offset:
name = self._get_object_name(
metric, key, agg.method, version)
metric_size = len(data)

if metric_size > DEFAULT_RADOS_BUFFER_SIZE:
MAP_UNAGGREGATED_METRIC_NAME_BY_SIZE[name] = metric_size
LOG.debug(
"Storing time series size [%s] for metric [%s].",
metric_size, name)
if offset is None:
self.ioctx.write_full(name, data)
else:
Expand Down Expand Up @@ -166,14 +153,7 @@ def _get_splits_unbatched(self, metric, key, aggregation, version=3):
try:
name = self._get_object_name(
metric, key, aggregation.method, version)

metric_size = MAP_UNAGGREGATED_METRIC_NAME_BY_SIZE.get(
name, DEFAULT_RADOS_BUFFER_SIZE)

LOG.debug("Reading metric [%s] with buffer size of [%s].",
name, metric_size)

return self._get_object_content(name, buffer_size=metric_size)
return self._get_object_content(name)
except rados.ObjectNotFound:
return

Expand Down Expand Up @@ -226,16 +206,9 @@ def _build_unaggregated_timeserie_path(metric, version):

def _get_or_create_unaggregated_timeseries_unbatched(
self, metric, version=3):
metric_name = self._build_unaggregated_timeserie_path(metric, version)
metric_size = MAP_UNAGGREGATED_METRIC_NAME_BY_SIZE.get(
metric_name, DEFAULT_RADOS_BUFFER_SIZE)

LOG.debug("Reading unaggregated metric [%s] with buffer size of [%s].",
metric_name, metric_size)

try:
contents = self._get_object_content(
metric_name, buffer_size=metric_size)
self._build_unaggregated_timeserie_path(metric, version))
except rados.ObjectNotFound:
self._create_metric(metric)
else:
Expand All @@ -245,23 +218,14 @@ def _get_or_create_unaggregated_timeseries_unbatched(

def _store_unaggregated_timeseries_unbatched(
self, metric, data, version=3):
self.ioctx.write_full(
self._build_unaggregated_timeserie_path(metric, version), data)

metric_name = self._build_unaggregated_timeserie_path(metric, version)
metric_size = len(data)

if metric_size > DEFAULT_RADOS_BUFFER_SIZE:
MAP_UNAGGREGATED_METRIC_NAME_BY_SIZE[metric_name] = metric_size
LOG.debug(
"Storing unaggregated time series size [%s] for metric [%s]",
metric_size, metric_name)
self.ioctx.write_full(metric_name, data)

def _get_object_content(self, name, buffer_size=DEFAULT_RADOS_BUFFER_SIZE):
def _get_object_content(self, name):
offset = 0
content = b''

while True:
data = self.ioctx.read(name, length=buffer_size, offset=offset)
data = self.ioctx.read(name, offset=offset)
if not data:
break
content += data
Expand Down

0 comments on commit b8feb23

Please sign in to comment.