author    Aleksey Lim <alsroot@sugarlabs.org>    2013-11-21 20:26:30 (GMT)
committer Aleksey Lim <alsroot@sugarlabs.org>    2013-11-21 20:26:30 (GMT)
commit    ebb59c1da33fddff54529860ad3b12aaa41ea4e8 (patch)
tree      ab3b583065df88ba967be1ccff181f0ea062c074 /sugar_network
parent    0e931f9bbb075f66f951057adec96afbf8b1c792 (diff)
Regenerate node stats
Diffstat (limited to 'sugar_network')
-rw-r--r--  sugar_network/client/routes.py     |   1
-rw-r--r--  sugar_network/db/index.py          |   2
-rw-r--r--  sugar_network/model/user.py        |   4
-rw-r--r--  sugar_network/node/routes.py       | 126
-rw-r--r--  sugar_network/node/stats_node.py   | 221
-rw-r--r--  sugar_network/toolkit/__init__.py  |  22
-rw-r--r--  sugar_network/toolkit/http.py      |   3
-rw-r--r--  sugar_network/toolkit/router.py    |   2
-rw-r--r--  sugar_network/toolkit/rrd.py       |  79
9 files changed, 259 insertions, 201 deletions
diff --git a/sugar_network/client/routes.py b/sugar_network/client/routes.py
index b4680c0..e43eea5 100644
--- a/sugar_network/client/routes.py
+++ b/sugar_network/client/routes.py
@@ -552,7 +552,6 @@ class _Auth(http.SugarAuth):
import gconf
conf = gconf.client_get_default()
self._profile['name'] = conf.get_string('/desktop/sugar/user/nick')
- self._profile['color'] = conf.get_string('/desktop/sugar/user/color')
return http.SugarAuth.profile(self)
def __call__(self, nonce):
diff --git a/sugar_network/db/index.py b/sugar_network/db/index.py
index 4de1382..ab40527 100644
--- a/sugar_network/db/index.py
+++ b/sugar_network/db/index.py
@@ -170,7 +170,7 @@ class IndexReader(object):
enquire = self._enquire(request, query, order_by, group_by)
mset = self._call_db(enquire.get_mset, offset, limit, check_at_least)
- _logger.debug('Found in %s: %s time=%s total=%s parsed=%s',
+ _logger.debug('Found in %s: query=%r time=%s total=%s parsed=%s',
self.metadata.name, query, time.time() - start_timestamp,
mset.get_matches_estimated(), enquire.get_query())
diff --git a/sugar_network/model/user.py b/sugar_network/model/user.py
index e35cce8..69d0d42 100644
--- a/sugar_network/model/user.py
+++ b/sugar_network/model/user.py
@@ -23,10 +23,6 @@ class User(db.Resource):
def name(self, value):
return value
- @db.stored_property()
- def color(self, value):
- return value
-
@db.indexed_property(prefix='P', full_text=True, default='')
def location(self, value):
return value
diff --git a/sugar_network/node/routes.py b/sugar_network/node/routes.py
index 1182f28..f7fb9f4 100644
--- a/sugar_network/node/routes.py
+++ b/sugar_network/node/routes.py
@@ -55,12 +55,14 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
self._auth_config_mtime = 0
if stats_node.stats_node.value:
- self._stats = stats_node.Sniffer(volume)
+ stats_path = join(node.stats_root.value, 'node')
+ self._stats = stats_node.Sniffer(volume, stats_path)
coroutine.spawn(self._commit_stats)
def close(self):
if self._stats is not None:
self._stats.commit()
+ self._stats.commit_objects()
@property
def guid(self):
@@ -402,6 +404,91 @@ class NodeRoutes(model.VolumeRoutes, model.FrontRoutes):
return result
+def generate_node_stats(volume, path):
+ tmp_path = toolkit.mkdtemp()
+ new_stats = stats_node.Sniffer(volume, tmp_path, True)
+ old_stats = stats_node.Sniffer(volume, path)
+
+ def timeline(ts):
+ ts = long(ts)
+ end = long(time.time())
+ step = None
+
+ archives = {}
+ for rra in stats_node.stats_node_rras.value:
+ a_step, a_size = [long(i) for i in rra.split(':')[-2:]]
+ a_step *= stats_node.stats_node_step.value
+ a_start = end - min(end, a_step * a_size)
+ if archives.setdefault(a_start, a_step) > a_step:
+ archives[a_start] = a_step
+ archives = list(sorted(archives.items()))
+
+ try:
+ while ts <= end:
+ while not step or archives and ts >= archives[0][0]:
+ archive_start, step = archives.pop(0)
+ ts = max(ts / step * step, archive_start)
+ yield ts, ts + step - 1, step
+ ts += step
+ except GeneratorExit:
+ shutil.rmtree(tmp_path, ignore_errors=True)
+
+ start = next(volume['context'].find(limit=1, order_by='ctime')[0])['ctime']
+ for left, right, step in timeline(start):
+ for resource, props in [
+ ('user', []),
+ ('context', []),
+ ('implementation', ['context']),
+ ('artifact', ['context']),
+ ('feedback', ['context']),
+ ('solution', ['context', 'feedback']),
+ ('review', ['context', 'artifact', 'rating']),
+ ('report', ['context', 'implementation']),
+ ('comment', ['context', 'review', 'feedback', 'solution']),
+ ]:
+ objs, __ = volume[resource].find(
+ query='ctime:%s..%s' % (left, right))
+ for obj in objs:
+ request = Request(method='POST', path=[resource],
+ content=obj.properties(props))
+ new_stats.log(request)
+ for resource, props in [
+ ('user', ['layer']),
+ ('context', ['layer']),
+ ('implementation', ['layer']),
+ ('artifact', ['layer']),
+ ('feedback', ['layer', 'solution']),
+ ('solution', ['layer']),
+ ('review', ['layer']),
+ ('report', ['layer']),
+ ('comment', ['layer']),
+ ]:
+ objs, __ = volume[resource].find(
+ query='mtime:%s..%s' % (left, right))
+ for obj in objs:
+ if 'deleted' in obj['layer']:
+ request = Request(method='DELETE',
+ path=[resource, obj.guid])
+ else:
+ request = Request(method='PUT', path=[resource, obj.guid],
+ content=obj.properties(props))
+ new_stats.log(request)
+ downloaded = {}
+ for resource in ('context', 'artifact'):
+ stats = old_stats.report(
+ {resource: ['downloaded']}, left - step, right, 1)
+ if not stats.get(resource):
+ continue
+ stats = stats[resource][-1][1].get('downloaded')
+ if stats:
+ downloaded[resource] = {'downloaded': stats}
+ new_stats.commit(left + (right - left) / 2, downloaded)
+
+ new_stats.commit_objects(True)
+ shutil.rmtree(path)
+ shutil.move(tmp_path, path)
+
+
@contextmanager
def load_bundle(volume, request, bundle_path):
impl = request.copy()
@@ -488,6 +575,14 @@ def load_bundle(volume, request, bundle_path):
def _load_context_metadata(bundle, spec):
+
+ def convert(data, w, h):
+ result = toolkit.svg_to_png(data.getvalue(), w, h)
+ return {'blob': result,
+ 'mime_type': 'image/png',
+ 'digest': hashlib.sha1(result.getvalue()).hexdigest(),
+ }
+
result = {}
for prop in ('homepage', 'mime_types'):
if spec[prop]:
@@ -503,8 +598,8 @@ def _load_context_metadata(bundle, spec):
'mime_type': 'image/svg+xml',
'digest': hashlib.sha1(icon.getvalue()).hexdigest(),
},
- 'preview': _svg_to_png(icon.getvalue(), 160, 120),
- 'icon': _svg_to_png(icon.getvalue(), 55, 55),
+ 'preview': convert(icon, 160, 120),
+ 'icon': convert(icon, 55, 55),
})
except Exception:
exception(_logger, 'Failed to load icon')
@@ -538,28 +633,3 @@ def _load_context_metadata(bundle, spec):
exception(_logger, 'Gettext failed to read %r', mo_path[-1])
return result
-
-
-def _svg_to_png(data, w, h):
- import rsvg
- import cairo
-
- svg = rsvg.Handle(data=data)
- surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, w, h)
- context = cairo.Context(surface)
-
- scale = min(float(w) / svg.props.width, float(h) / svg.props.height)
- context.translate(
- int(w - svg.props.width * scale) / 2,
- int(h - svg.props.height * scale) / 2)
- context.scale(scale, scale)
- svg.render_cairo(context)
-
- result = StringIO()
- surface.write_to_png(result)
- result.seek(0)
-
- return {'blob': result,
- 'mime_type': 'image/png',
- 'digest': hashlib.sha1(result.getvalue()).hexdigest(),
- }
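
Note: a minimal sketch (not part of the commit) of how the new generate_node_stats() helper could be driven from a one-off maintenance script; everything except generate_node_stats() itself and node.stats_root is an assumption made for illustration.

# Hypothetical maintenance script; opening the volume is sketched with a
# placeholder, since the commit does not show how the node constructs it.
from os.path import join

from sugar_network import node
from sugar_network.node.routes import generate_node_stats

def open_node_volume():
    # Placeholder: return whatever volume object the node normally uses
    # (anything supporting volume[resource].find(), which is all that
    # generate_node_stats() relies on); not a real API from this tree.
    raise NotImplementedError

volume = open_node_volume()
stats_path = join(node.stats_root.value, 'node')  # same path NodeRoutes uses
# Replays resource history as POST/PUT/DELETE requests through a fresh
# Sniffer, then replaces the old RRD directory at stats_path with the
# regenerated one.
generate_node_stats(volume, stats_path)
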
diff --git a/sugar_network/node/stats_node.py b/sugar_network/node/stats_node.py
index 61a4a28..fb9ef5d 100644
--- a/sugar_network/node/stats_node.py
+++ b/sugar_network/node/stats_node.py
@@ -14,9 +14,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
-from os.path import join
-from sugar_network import node
from sugar_network.toolkit.rrd import Rrd
from sugar_network.toolkit import Option
@@ -32,11 +30,11 @@ stats_node_step = Option(
stats_node_rras = Option(
'comma separated list of RRAs for node RRD databases',
default=[
- 'RRA:AVERAGE:0.5:1:288', # one day with 5min step
- 'RRA:AVERAGE:0.5:3:672', # one week with 15min step
- 'RRA:AVERAGE:0.5:12:744', # one month with 1h step
- 'RRA:AVERAGE:0.5:144:732', # one year with 12h step
- 'RRA:AVERAGE:0.5:288:36600', # hundred years with 24h step
+ 'RRA:AVERAGE:0.5:1:864', # 3d with 5min step
+ 'RRA:AVERAGE:0.5:288:3660', # 10y with 1d step
+ 'RRA:AVERAGE:0.5:2880:366', # 10y with 10d step
+ 'RRA:AVERAGE:0.5:8640:122', # 10y with 30d step
+ 'RRA:AVERAGE:0.5:105408:10', # 10y with 1y step
],
type_cast=Option.list_cast, type_repr=Option.list_repr)
@@ -45,8 +43,7 @@ _logger = logging.getLogger('node.stats_node')
class Sniffer(object):
- def __init__(self, volume):
- path = join(node.stats_root.value, 'node')
+ def __init__(self, volume, path, reset=False):
_logger.info('Collect node stats in %r', path)
self._volume = volume
@@ -54,21 +51,76 @@ class Sniffer(object):
self._stats = {}
for name, cls in _STATS.items():
- self._stats[name] = cls(self._stats, volume)
+ stats = self._stats[name] = cls(self._stats, volume, reset)
+ fields = {}
+ for attr in dir(stats):
+ if attr[0] == '_' or attr[0].isupper() or \
+ type(getattr(stats, attr)) not in (int, long):
+ continue
+ if attr == 'total':
+ dst = 'GAUGE'
+ limit = 60 * 60 * 24 * 365
+ else:
+ dst = 'ABSOLUTE'
+ limit = stats_node_step.value
+ fields[attr] = 'DS:%s:%s:%s:U:U' % (attr, dst, limit)
+ if fields:
+ self._rrd[name].fields = fields
+
+ def __getitem__(self, name):
+ return self._rrd[name]
def log(self, request):
if request.cmd or request.resource not in _STATS:
return
self._stats[request.resource].log(request)
- def commit(self, timestamp=None):
+ def commit(self, timestamp=None, extra_values=None):
_logger.trace('Commit node stats')
for resource, stats in self._stats.items():
- values = stats.commit()
- if values is not None:
+ if resource not in self._rrd:
+ continue
+ values = {}
+ for field in self._rrd[resource].fields:
+ values[field] = getattr(stats, field)
+ if field != 'total':
+ setattr(stats, field, 0)
+ if extra_values and resource in extra_values:
+ values.update(extra_values[resource])
+ if values:
self._rrd[resource].put(values, timestamp=timestamp)
+ def commit_objects(self, reset=False):
+ _logger.trace('Commit object stats')
+
+ for resource, stats in self._stats.items():
+ obj = {
+ 'downloads': 0,
+ 'reviews': (0, 0),
+ }
+ directory = self._volume[resource]
+ for guid, obj_stats in stats.active.items():
+ if not obj_stats.reviews and not obj_stats.downloads:
+ continue
+ if not directory.exists(guid):
+ _logger.warning('Ignore stats for missed %r %s',
+ guid, resource)
+ continue
+ if not reset:
+ obj = directory.get(guid)
+ patch = {}
+ if obj_stats.downloads:
+ patch['downloads'] = obj_stats.downloads + obj['downloads']
+ if obj_stats.reviews:
+ reviews, rating = obj['reviews']
+ reviews += obj_stats.reviews
+ rating += obj_stats.rating
+ patch['reviews'] = [reviews, rating]
+ patch['rating'] = int(round(float(rating) / reviews))
+ directory.update(guid, patch)
+ stats.active.clear()
+
def report(self, dbs, start, end, records):
result = {}
@@ -77,11 +129,14 @@ class Sniffer(object):
return result
if not start:
- start = min([i.first for i in rdbs])
+ start = min([i.first for i in rdbs]) or 0
if not end:
- end = max([i.last for i in rdbs])
+ end = max([i.last for i in rdbs]) or 0
resolution = max(1, (end - start) / records)
+ _logger.debug('Report start=%s end=%s resolution=%s dbs=%r',
+ start, end, resolution, dbs)
+
for rdb in rdbs:
info = result[rdb.name] = []
for ts, ds_values in rdb.get(start, end, resolution):
@@ -105,15 +160,18 @@ class _Stats(object):
RESOURCE = None
OWNERS = []
- def __init__(self, stats, volume):
+ def __init__(self, stats, volume, reset):
+ self.active = {}
self._stats = stats
self._volume = volume
- self._directory = volume[self.RESOURCE]
- def log(self, request):
- pass
+ def __getitem__(self, guid):
+ result = self.active.get(guid)
+ if result is None:
+ result = self.active[guid] = _ObjectStats()
+ return result
- def commit(self):
+ def log(self, request):
pass
@@ -121,16 +179,10 @@ class _ResourceStats(_Stats):
total = 0
- def __init__(self, stats, volume):
- _Stats.__init__(self, stats, volume)
- self.total = volume[self.RESOURCE].find(limit=0)[1]
- self._active = {}
-
- def __getitem__(self, guid):
- result = self._active.get(guid)
- if result is None:
- result = self._active[guid] = _ObjectStats()
- return result
+ def __init__(self, stats, volume, reset):
+ _Stats.__init__(self, stats, volume, reset)
+ if not reset:
+ self.total = volume[self.RESOURCE].find(limit=0)[1]
def log(self, request):
if request.method == 'POST':
@@ -138,37 +190,9 @@ class _ResourceStats(_Stats):
elif request.method == 'DELETE':
self.total -= 1
- def commit(self):
- for guid, stats in self._active.items():
- if not stats.reviews and not stats.downloads:
- continue
- doc = self._directory.get(guid)
- updates = {}
- if stats.downloads:
- updates['downloads'] = stats.downloads + doc['downloads']
- if stats.reviews:
- reviews, rating = doc['reviews']
- reviews += stats.reviews
- rating += stats.rating
- updates['reviews'] = [reviews, rating]
- updates['rating'] = int(round(float(rating) / reviews))
- self._directory.update(guid, updates)
- self._active.clear()
-
- result = {}
- for attr in dir(self):
- if attr[0] == '_' or attr[0].isupper():
- continue
- value = getattr(self, attr)
- if type(value) in (set, dict):
- value = len(value)
- if type(value) in (int, long):
- result[attr] = value
-
- return result
-
def parse_context(self, request):
context = None
+ directory = self._volume[self.RESOURCE]
def parse_context(props):
for owner in self.OWNERS:
@@ -186,14 +210,14 @@ class _ResourceStats(_Stats):
elif self.RESOURCE == 'context':
context = request.guid
elif self.RESOURCE != 'user':
- context = self._directory.get(request.guid)['context']
+ context = directory.get(request.guid)['context']
elif request.method == 'PUT':
if self.RESOURCE == 'context':
context = request.guid
else:
context = request.content.get('context')
if not context:
- context = self._directory.get(request.guid)['context']
+ context = directory.get(request.guid)['context']
elif request.method == 'POST':
context = parse_context(request.content)
@@ -211,17 +235,8 @@ class _ContextStats(_ResourceStats):
released = 0
failed = 0
- reviewed = 0
downloaded = 0
- def commit(self):
- result = _ResourceStats.commit(self)
- self.released = 0
- self.failed = 0
- self.reviewed = 0
- self.downloaded = 0
- return result
-
class _ImplementationStats(_Stats):
@@ -231,7 +246,7 @@ class _ImplementationStats(_Stats):
def log(self, request):
if request.method == 'GET':
if request.prop == 'data':
- context = self._directory.get(request.guid)
+ context = self._volume[self.RESOURCE].get(request.guid)
self._stats['context'][context.context].downloads += 1
self._stats['context'].downloaded += 1
elif request.method == 'POST':
@@ -253,8 +268,6 @@ class _ReviewStats(_ResourceStats):
RESOURCE = 'review'
OWNERS = ['artifact', 'context']
- commented = 0
-
def log(self, request):
_ResourceStats.log(self, request)
@@ -262,17 +275,10 @@ class _ReviewStats(_ResourceStats):
if request.content.get('artifact'):
artifact = self._stats['artifact']
stats = artifact[request.content['artifact']]
- artifact.reviewed += 1
else:
stats = self._stats['context'][self.parse_context(request)]
- self._stats['context'].reviewed += 1
stats.reviews += 1
- stats.rating += request.content['rating']
-
- def commit(self):
- result = _ResourceStats.commit(self)
- self.commented = 0
- return result
+ stats.rating += request.content.get('rating') or 0
class _FeedbackStats(_ResourceStats):
@@ -280,56 +286,18 @@ class _FeedbackStats(_ResourceStats):
RESOURCE = 'feedback'
OWNERS = ['context']
- solutions = 0
- commented = 0
-
- def __init__(self, stats, volume):
- _ResourceStats.__init__(self, stats, volume)
- not_solved = volume['feedback'].find(limit=0, solution='')[1]
- self.solutions = self.total - not_solved
-
- def log(self, request):
- _ResourceStats.log(self, request)
-
- if request.method == 'POST':
- if request.content.get('solution'):
- self.solutions += 1
- elif request.method == 'PUT':
- if cmp(bool(self._directory.get(request.guid)['solution']),
- bool(request.content.get('solution'))):
- if request.content.get('solution'):
- self.solutions += 1
- else:
- self.solutions -= 1
- elif request.method == 'DELETE':
- if self._directory.get(request.guid)['solution']:
- self.solutions -= 1
-
- def commit(self):
- result = _ResourceStats.commit(self)
- self.commented = 0
- return result
-
class _SolutionStats(_ResourceStats):
RESOURCE = 'solution'
OWNERS = ['feedback']
- commented = 0
-
- def commit(self):
- result = _ResourceStats.commit(self)
- self.commented = 0
- return result
-
class _ArtifactStats(_ResourceStats):
RESOURCE = 'artifact'
OWNERS = ['context']
- reviewed = 0
downloaded = 0
def log(self, request):
@@ -340,25 +308,12 @@ class _ArtifactStats(_ResourceStats):
self[request.guid].downloads += 1
self.downloaded += 1
- def commit(self):
- result = _ResourceStats.commit(self)
- self.reviewed = 0
- self.downloaded = 0
- return result
-
-class _CommentStats(_Stats):
+class _CommentStats(_ResourceStats):
RESOURCE = 'comment'
OWNERS = ['solution', 'feedback', 'review']
- def log(self, request):
- if request.method == 'POST':
- for owner in ('solution', 'feedback', 'review'):
- if request.content.get(owner):
- self._stats[owner].commented += 1
- break
-
_STATS = {_UserStats.RESOURCE: _UserStats,
_ContextStats.RESOURCE: _ContextStats,
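
Note: a small self-contained check (not part of the commit) of the retention the new default RRAs provide, assuming the stats_node_step default of 300 seconds (5 minutes); it reuses the same split(':')[-2:] parsing that generate_node_stats() applies above.

# Verify the retention comments on the new default RRAs (assumes a 300s step).
STEP = 300  # stats_node_step default (5 min) -- an assumption in this sketch

for rra, comment in [
        ('RRA:AVERAGE:0.5:1:864', '3d with 5min step'),
        ('RRA:AVERAGE:0.5:288:3660', '10y with 1d step'),
        ('RRA:AVERAGE:0.5:2880:366', '10y with 10d step'),
        ('RRA:AVERAGE:0.5:8640:122', '10y with 30d step'),
        ('RRA:AVERAGE:0.5:105408:10', '10y with 1y step'),
        ]:
    steps_per_row, rows = [int(i) for i in rra.split(':')[-2:]]
    row = steps_per_row * STEP          # seconds consolidated into one row
    span_days = row * rows / 86400.0    # total history the archive keeps
    print('%-27s row=%7dmin span=%6.0fd  # %s'
          % (rra, row // 60, span_days, comment))
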
diff --git a/sugar_network/toolkit/__init__.py b/sugar_network/toolkit/__init__.py
index d56ec59..cbe1b91 100644
--- a/sugar_network/toolkit/__init__.py
+++ b/sugar_network/toolkit/__init__.py
@@ -21,6 +21,7 @@ import shutil
import logging
import tempfile
import collections
+from cStringIO import StringIO
from os.path import exists, join, islink, isdir, dirname, basename, abspath
from os.path import lexists, isfile
@@ -498,6 +499,27 @@ class mkdtemp(str):
shutil.rmtree(self)
+def svg_to_png(data, w, h):
+ import rsvg
+ import cairo
+
+ svg = rsvg.Handle(data=data)
+ surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, w, h)
+ context = cairo.Context(surface)
+
+ scale = min(float(w) / svg.props.width, float(h) / svg.props.height)
+ context.translate(
+ int(w - svg.props.width * scale) / 2,
+ int(h - svg.props.height * scale) / 2)
+ context.scale(scale, scale)
+ svg.render_cairo(context)
+
+ result = StringIO()
+ surface.write_to_png(result)
+ result.seek(0)
+ return result
+
+
def TemporaryFile(*args, **kwargs):
if cachedir.value:
if not exists(cachedir.value):
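
Note: a minimal usage sketch (not part of the commit) of the relocated svg_to_png() helper; the inline SVG and output path are made up, and rsvg/cairo still need to be importable when it runs.

# Hypothetical caller of toolkit.svg_to_png(); payload and path are examples.
from sugar_network.toolkit import svg_to_png

SVG = ('<svg xmlns="http://www.w3.org/2000/svg" width="55" height="55">'
       '<rect width="55" height="55" fill="#ffaa00"/></svg>')

png = svg_to_png(SVG, 55, 55)      # returns a rewound StringIO with PNG bytes
with open('/tmp/icon.png', 'wb') as f:
    f.write(png.getvalue())
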
diff --git a/sugar_network/toolkit/http.py b/sugar_network/toolkit/http.py
index 2506873..d1b2fe7 100644
--- a/sugar_network/toolkit/http.py
+++ b/sugar_network/toolkit/http.py
@@ -310,6 +310,7 @@ class Connection(object):
sys.path.insert(0, sys_path)
from requests import Session, exceptions
Connection._Session = Session
+ # pylint: disable-msg=W0601
global ConnectionError
ConnectionError = exceptions.ConnectionError
@@ -341,7 +342,7 @@ class SugarAuth(object):
def __init__(self, key_path, profile=None):
self._key_path = abspath(expanduser(key_path))
- self._profile = profile or {'color': '#000000,#000000'}
+ self._profile = profile or {}
self._key = None
self._pubkey = None
self._login = None
diff --git a/sugar_network/toolkit/router.py b/sugar_network/toolkit/router.py
index 885bca9..6309301 100644
--- a/sugar_network/toolkit/router.py
+++ b/sugar_network/toolkit/router.py
@@ -575,7 +575,7 @@ class Router(object):
result = None
_logger.trace('%s call: request=%s response=%r result=%r',
- self, request.environ, response, result)
+ self, request.environ, response, repr(result)[:256])
start_response(response.status, response.items())
if result_streamed:
diff --git a/sugar_network/toolkit/rrd.py b/sugar_network/toolkit/rrd.py
index a5f879d..da58aff 100644
--- a/sugar_network/toolkit/rrd.py
+++ b/sugar_network/toolkit/rrd.py
@@ -18,9 +18,10 @@
import re
import os
import time
+import json
import bisect
import logging
-from os.path import exists, join
+from os.path import exists, join, splitext
_DB_FILENAME_RE = re.compile('(.*?)(-[0-9]+){0,1}\\.rrd$')
@@ -28,7 +29,7 @@ _INFO_RE = re.compile('([^[]+)\\[([^]]+)\\]\\.(.*)$')
_FETCH_PAGE = 256
-_logger = logging.getLogger('sugar_stats')
+_logger = logging.getLogger('rrd')
_rrdtool = None
@@ -76,8 +77,8 @@ class Rrd(object):
def get(self, name):
db = self._dbsets.get(name)
if db is None:
- db = self._dbsets[name] = \
- _DbSet(self._root, name, self._step, self._rras)
+ db = _DbSet(self._root, name, self._step, self._rras)
+ self._dbsets[name] = db
return db
@@ -89,10 +90,22 @@ class _DbSet(object):
self._step = step
self._rras = rras
self._revisions = []
- self._field_names = []
+ self._fields = None
+ self._field_names = None
self.__db = None
@property
+ def fields(self):
+ return self._field_names
+
+ @fields.setter
+ def fields(self, fields):
+ self._field_names = fields.keys()
+ self._field_names.sort()
+ self._fields = [str(fields[i]) for i in self._field_names]
+ _logger.debug('Set %r fields for %r', self._fields, self.name)
+
+ @property
def first(self):
if self._revisions:
return self._revisions[0].first
@@ -110,9 +123,11 @@ class _DbSet(object):
return db
def put(self, values, timestamp=None):
- if not self._field_names:
- self._field_names = values.keys()
- self._field_names.sort()
+ if not self.fields:
+ _logger.debug('Parse fields from the first put')
+ self.fields = dict([
+ (i, 'DS:%s:GAUGE:%s:U:U' % (i, self._step * 2))
+ for i in values])
if not timestamp:
timestamp = int(time.time())
@@ -133,7 +148,7 @@ class _DbSet(object):
_logger.debug('Put %r to %s', value, db.path)
- db.put(':'.join(value))
+ db.put(':'.join(value), timestamp)
def get(self, start=None, end=None, resolution=None):
if not self._revisions:
@@ -154,11 +169,11 @@ class _DbSet(object):
break
start = start - start % self._step - self._step
- end = min(end, start + _FETCH_PAGE * resolution)
- end -= end % self._step + self._step
+ last = min(end, start + _FETCH_PAGE * resolution)
+ last -= last % self._step + self._step
for db in reversed(revisions):
- db_end = min(end, db.last - self._step)
+ db_end = min(last, db.last - self._step)
if start > db_end:
break
(row_start, start, row_step), __, rows = _rrdtool.fetch(
@@ -169,17 +184,16 @@ class _DbSet(object):
'--resolution', str(resolution))
for raw_row in rows:
row_start += row_step
+ if row_start > end:
+ break
row = {}
- accept = False
for i, value in enumerate(raw_row):
- row[db.field_names[i]] = value
- accept = accept or value is not None
- if accept:
- yield row_start, row
+ row[db.field_names[i]] = value or .0
+ yield row_start, row
start = db_end + 1
def _get_db(self, timestamp):
- if self.__db is None and self._field_names:
+ if self.__db is None and self._fields:
if self._revisions:
db = self._revisions[-1]
if db.last >= timestamp:
@@ -189,14 +203,13 @@ class _DbSet(object):
return None
if db.step != self._step or db.rras != self._rras or \
db.field_names != self._field_names:
- db = self._create_db(self._field_names, db.revision + 1,
- db.last)
+ db = self._create_db(db.revision + 1, db.last)
else:
- db = self._create_db(self._field_names, 0, timestamp)
+ db = self._create_db(0, timestamp)
self.__db = db
return self.__db
- def _create_db(self, field_names, revision, timestamp):
+ def _create_db(self, revision, timestamp):
filename = self.name
if revision:
filename += '-%s' % revision
@@ -205,15 +218,11 @@ class _DbSet(object):
_logger.debug('Create %s database in %s start=%s step=%s',
filename, self._root, timestamp, self._step)
- fields = []
- for name in field_names:
- fields.append(str('DS:%s:GAUGE:%s:U:U' % (name, self._step * 2)))
-
_rrdtool.create(
str(join(self._root, filename)),
'--start', str(timestamp - self._step),
'--step', str(self._step),
- *(fields + self._rras))
+ *(self._fields + self._rras))
return self.load(filename, revision)
@@ -222,6 +231,7 @@ class _Db(object):
def __init__(self, path, revision=0):
self.path = str(path)
+ self._meta_path = splitext(path)[0] + '.meta'
self.revision = revision
self.fields = []
self.field_names = []
@@ -229,6 +239,7 @@ class _Db(object):
info = _rrdtool.info(self.path)
self.step = info['step']
+ self.first = 0
self.last = info['last_update']
fields = {}
@@ -257,13 +268,17 @@ class _Db(object):
self.fields.append(props)
self.field_names.append(name)
- def put(self, value):
+ if exists(self._meta_path):
+ with file(self._meta_path) as f:
+ self.first = json.load(f).get('first')
+
+ def put(self, value, timestamp):
+ if not self.first:
+ with file(self._meta_path, 'w') as f:
+ json.dump({'first': timestamp}, f)
+ self.first = timestamp
_rrdtool.update(self.path, str(value))
self.last = _rrdtool.info(self.path)['last_update']
- @property
- def first(self):
- return _rrdtool.first(self.path)
-
def __cmp__(self, other):
return cmp(self.revision, other.revision)
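
Note: a minimal sketch (not part of the commit) of driving the reworked rrd API the way Sniffer now does; the database root, db name and field names are illustrative, and the Rrd(root, step, rras) constructor signature is assumed from the surrounding code.

# Hypothetical driver mirroring Sniffer.__init__()/commit(); paths and field
# names are made up, and the Rrd constructor signature is an assumption.
import time

from sugar_network.toolkit.rrd import Rrd

rrd = Rrd('/tmp/node-stats', 300, ['RRA:AVERAGE:0.5:1:864'])
db = rrd.get('context')   # Rrd.get() shown above; Sniffer indexes rrd[name]

# Declare DS definitions up front via the new fields setter; previously they
# could only be inferred from the keys of the first put().
db.fields = {
    'total': 'DS:total:GAUGE:%s:U:U' % (60 * 60 * 24 * 365),
    'downloaded': 'DS:downloaded:ABSOLUTE:300:U:U',
}

# put() now records the first update in a '<name>.meta' JSON sidecar, so
# _Db.first no longer needs to query rrdtool for it.
db.put({'total': 1, 'downloaded': 0}, timestamp=int(time.time()))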