Web   ·   Wiki   ·   Activities   ·   Blog   ·   Lists   ·   Chat   ·   Meeting   ·   Bugs   ·   Git   ·   Translate   ·   Archive   ·   People   ·   Donate
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorGustavo Duarte <gduarte@activitycentral.com>2012-12-27 01:42:33 (GMT)
committer Gustavo Duarte <gduarte@activitycentral.com>2012-12-27 01:42:33 (GMT)
commit8557dda9f33c0a7cc27b8254ca8250ee246b7713 (patch)
tree71efbb8e592e3b8e3c5847e770fd30931861d92c
parent084262ea16b2b352251065dc63a5bb19fcba93f7 (diff)
add queries
-rw-r--r--build/lib.linux-x86_64-2.7/stats_consolidation/consolidation.py39
-rw-r--r--build/lib.linux-x86_64-2.7/stats_consolidation/db.py48
-rw-r--r--build/lib.linux-x86_64-2.7/stats_consolidation/rrd_files.py2
-rwxr-xr-xsql/make_report47
-rw-r--r--sql/master_create.sql23
-rw-r--r--sql/report.py84
-rw-r--r--sql/report.pycbin0 -> 1998 bytes
-rw-r--r--src/consolidation.py35
-rw-r--r--src/consolidation.pycbin1589 -> 0 bytes
-rwxr-xr-xsrc/consolidation_run10
-rw-r--r--src/db.py202
-rw-r--r--src/db.pycbin6765 -> 0 bytes
-rw-r--r--src/rrd_files.py133
-rw-r--r--src/rrd_files.pycbin4993 -> 0 bytes
-rw-r--r--src/stats_consolidation/consolidation.py39
-rw-r--r--src/stats_consolidation/db.py48
-rw-r--r--src/stats_consolidation/rrd_files.py2
-rw-r--r--src/test_cons.py9
-rw-r--r--src/test_db.py6
-rw-r--r--src/test_rrd.py39
20 files changed, 273 insertions, 493 deletions
diff --git a/build/lib.linux-x86_64-2.7/stats_consolidation/consolidation.py b/build/lib.linux-x86_64-2.7/stats_consolidation/consolidation.py
index 01d8b84..a1fd79a 100644
--- a/build/lib.linux-x86_64-2.7/stats_consolidation/consolidation.py
+++ b/build/lib.linux-x86_64-2.7/stats_consolidation/consolidation.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
import os
import argparse
@@ -14,22 +15,24 @@ class Consolidation:
self.db = db
def process_rrds (self):
id_hash_list = os.listdir(unicode(self.base_path))
- if id_hash_list:
- for id_hash in id_hash_list:
- user_hash_list = os.listdir( unicode( os.path.join(self.base_path, id_hash) ) )
- if user_hash_list:
- for user_hash in user_hash_list:
- rrd_list = os.listdir( unicode(os.path.join(self.base_path, id_hash, user_hash)) )
- if rrd_list:
- for rrd in rrd_list:
- rrd_path = unicode (os.path.join(self.base_path, id_hash, user_hash) )
- rrd_obj = RRD (path=rrd_path, name=rrd, date_start=self.date_start, date_end=None)
- self.db.store_activity_uptime(rrd_obj)
- else:
- print "None rrd file found" + os.path.join(self.base_path, id_hash, user_hash)
- else:
- print "None hash user found on: " + os.path.join(self.base_path, id_hash)
- else:
- print "None hash ids found on: " + self.base_path
-
+ try:
+ if id_hash_list:
+ for id_hash in id_hash_list:
+ user_hash_list = os.listdir( unicode( os.path.join(self.base_path, id_hash) ) )
+ if user_hash_list:
+ for user_hash in user_hash_list:
+ rrd_list = os.listdir( unicode(os.path.join(self.base_path, id_hash, user_hash)) )
+ if rrd_list:
+ for rrd in rrd_list:
+ rrd_path = unicode (os.path.join(self.base_path, id_hash, user_hash) )
+ rrd_obj = RRD (path=rrd_path, name=rrd, date_start=self.date_start, date_end=None)
+ self.db.store_activity_uptime(rrd_obj)
+ else:
+ print ("RRD file not found: {}".format(os.path.join(self.base_path, id_hash, user_hash)))
+ else:
+ print ("None hash user found on: {}".format(os.path.join(self.base_path, id_hash)))
+ else:
+ print ("None hash ids found on: {}".format(self.base_path))
+ except Exception as e:
+ print ("Error: {}".format(str(e)))
diff --git a/build/lib.linux-x86_64-2.7/stats_consolidation/db.py b/build/lib.linux-x86_64-2.7/stats_consolidation/db.py
index ddc1006..68444f2 100644
--- a/build/lib.linux-x86_64-2.7/stats_consolidation/db.py
+++ b/build/lib.linux-x86_64-2.7/stats_consolidation/db.py
@@ -90,6 +90,7 @@ class DB_Stats:
raise Exception ("Error: {}".format(err))
cursor.close()
+
def close (self):
@@ -204,3 +205,50 @@ class DB_Stats:
except mysql.connector.Error as err:
print("Fail {}: {}".format(cursor.statement, err))
cursor.close()
+
+
+ def connect (self):
+ print ("Try connect to db")
+ try:
+ self.cnx = mysql.connector.connect(user=self.user, password=self.password)
+ cursor = self.cnx.cursor()
+ self.cnx.database = self.db_name
+ cursor.close()
+ except mysql.connector.Error as err:
+ print("CONNECT FAIL {}".format (err))
+
+ def most_activity_used (self):
+ uptime_last=0
+ try:
+ cursor1 = self.cnx.cursor()
+ cursor2 = self.cnx.cursor()
+ cursor1.execute("SELECT name FROM Resources")
+
+ rows = cursor1.fetchall()
+ for name in rows:
+ if (name[0] != 'system') and (name[0] != 'journal') and (name[0] != 'network') and (name[0] != 'shell'):
+ cursor2.execute ("SELECT SUM(data) FROM Usages WHERE resource_name = %s", (name[0],))
+ uptime = cursor2.fetchone()
+ if uptime[0] > uptime_last:
+ uptime_last= uptime[0]
+ activity_name = name[0]
+ except mysql.connector.Error as err:
+ print("Fail {}:".format(err))
+ except:
+ print("most_activity_used Fail ")
+
+ cursor1.close()
+ cursor2.close()
+ return (activity_name, uptime_last)
+
+ def frequency_usage(self):
+ cursor = self.cnx.cursor()
+ try:
+ cursor.execute("SELECT SUM(data) FROM Usages WHERE resource_name = 'system'")
+ res = cursor.fetchone()
+ except mysql.connector.Error as err:
+ print("frequency_usage fail: {}".format(err))
+ cursor.close()
+
+ return res
+
diff --git a/build/lib.linux-x86_64-2.7/stats_consolidation/rrd_files.py b/build/lib.linux-x86_64-2.7/stats_consolidation/rrd_files.py
index a437e0d..c5561d7 100644
--- a/build/lib.linux-x86_64-2.7/stats_consolidation/rrd_files.py
+++ b/build/lib.linux-x86_64-2.7/stats_consolidation/rrd_files.py
@@ -47,7 +47,7 @@ class RRD:
try:
self.rrd = rrdtool.fetch (str(os.path.join(path,name)), 'AVERAGE', '-r 60', '-s '+ self.date_start, '-e '+self.date_end)
except:
- raise
+ raise Exception("rrdtool.fetch FAIL")
print " DS "
for item in self.DS.keys():
diff --git a/sql/make_report b/sql/make_report
new file mode 100755
index 0000000..8dc18d3
--- /dev/null
+++ b/sql/make_report
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2012, Gustavo Duarte
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
+import argparse
+from stats_consolidation.db import *
+import sys
+parser = argparse.ArgumentParser()
+parser.add_argument('--query',required=True)
+parser.add_argument('--start_date',required=True)
+parser.add_argument('--end_date',required=True)
+parser.add_argument('--db_name',required=True)
+parser.add_argument('--db_user',required=True)
+parser.add_argument('--db_pass',required=True)
+
+args = parser.parse_args()
+
+
+"""try:"""
+re = DB_Stats (args.db_name, args.db_user, args.db_pass)
+re.connect()
+if args.query == 'activity_most_used':
+ activity = re.most_activity_used();
+ print ("**************************************************************************************************************")
+ print ("THE MOST USED ACTIVITY: {}, TIME: {} hrs / {} mins / {} secs".format(activity[0], activity[1]/60/60, activity[1]/60, activity[1]))
+ print ("***************************************************************************************************************")
+
+if args.query == 'frequency_usage':
+ fres = re.frequency_usage();
+ print("Frequency usage: {}".format(fres))
+"""
+print ("Error: {}".format(sys.exc_info()[0]))
+"""
diff --git a/sql/master_create.sql b/sql/master_create.sql
deleted file mode 100644
index 2ff35c7..0000000
--- a/sql/master_create.sql
+++ /dev/null
@@ -1,23 +0,0 @@
-CREATE TABLE Usages (
- timeStamp TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
- userId INTEGER NOT NULL,
- resourceId INTEGER NOT NULL,
- startDate TIMESTAMP NOT NULL,
- dataType INTEGER NOT NULL,
- data INTEGER NOT NULL);
-
-CREATE TABLE Resources (
- id INTEGER auto_increment unique,
- name CHAR(250),
- PRIMARY KEY (name)
-);
-
-CREATE TABLE Users(
- id INTEGER auto_increment unique,
- machineSN CHAR(80),
- age INTEGER NOT NULL,
- school CHAR(80),
- software_version CHAR (80),
- timeStamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
- PRIMARY KEY (machineSN)
-);
diff --git a/sql/report.py b/sql/report.py
new file mode 100644
index 0000000..e3375a5
--- /dev/null
+++ b/sql/report.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2012, Gustavo Duarte
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
+from stats_consolidation.db import *
+
+
+
+class Report:
+ STAT={}
+ STAT ['Get_resource_name'] = ("SELECT name FROM `Resources`")
+
+ STAT ['Get_suma_uptime'] = ( "SELECT `data` FROM Usages WHERE `resource_name` = %s")
+
+ STAT ['Get_frequency_usage'] = ("SELECT SUM(`data`) FROM Usages WHERE `resource_name` = 'system'")
+
+
+ def __init__ (self, db_name, user, password):
+ self.db_name = db_name
+ self.user = user
+ self.password = password
+ """
+ try:
+ self.connect()
+ except Exception as e:
+ print ("INIT:")
+ """
+ def connect (self):
+ print ("Try connect to db")
+ try:
+ self.cnx = mysql.connector.connect(user=self.user, password=self.password)
+ cursor = self.cnx.cursor()
+ self.cnx.database = self.db_name
+ cursor.close()
+ except mysql.connector.Error as err:
+ print("CONNECT FAIL {}".format (err))
+
+ def most_activity_use (self):
+ print ("most_activity_used")
+ try:
+ res_tmp =('', 0)
+ res = ('', 0)
+ cursor = self.cnx.cursor()
+ cursor.execute(self.STAT['Get_resource_name'])
+ for (name) in cursor:
+ cursor.execute (self.STAT['Get_suma_uptime'], (name,))
+ res_tmp =(name, cursor.fetchone())
+ print ("activity: {} uptime: {}".format(res_tmp[0], res_tmp[1]))
+ if res_tmp[1] > res[1]:
+ res = res_tmp
+ except mysql.connector.Error as err:
+ print("Fail {}: {}".format(cursor.statement, err))
+ except Exception as e:
+ print("most_activity_used Fail ")
+ cursor.close()
+ return res
+
+ def frequency_usage(self):
+ cursor = self.cnx.cursor()
+ try:
+ cursor.execute(self.STAT['Get_frequency_usage'])
+ res = cursor.fetchone()
+ except mysql.connector.Error as err:
+ print("frequency_usage fail: {}".format(err))
+ cursor.close()
+
+ return res
+
+
+
diff --git a/sql/report.pyc b/sql/report.pyc
new file mode 100644
index 0000000..881fad7
--- /dev/null
+++ b/sql/report.pyc
Binary files differ
diff --git a/src/consolidation.py b/src/consolidation.py
deleted file mode 100644
index 978ffe4..0000000
--- a/src/consolidation.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-import rrd_files
-import db
-import argparse
-
-from rrd_files import *
-from db import *
-
-class Consolidation:
-
- def __init__(self, path, db):
- self.base_path = path
- self.date_start = db.get_date_last_record()
- self.db = db
- def process_rrds (self):
- id_hash_list = os.listdir(unicode(self.base_path))
- if id_hash_list:
- for id_hash in id_hash_list:
- user_hash_list = os.listdir( unicode( os.path.join(self.base_path, id_hash) ) )
- if user_hash_list:
- for user_hash in user_hash_list:
- rrd_list = os.listdir( unicode(os.path.join(self.base_path, id_hash, user_hash)) )
- if rrd_list:
- for rrd in rrd_list:
- rrd_path = unicode (os.path.join(self.base_path, id_hash, user_hash) )
- rrd_obj = RRD (path=rrd_path, name=rrd, date_start=self.date_start, date_end=None)
- self.db.store_activity_uptime(rrd_obj)
- else:
- print "None rrd file found" + os.path.join(self.base_path, id_hash, user_hash)
- else:
- print "None hash user found on: " + os.path.join(self.base_path, id_hash)
- else:
- print "None hash ids found on: " + self.base_path
-
-
diff --git a/src/consolidation.pyc b/src/consolidation.pyc
deleted file mode 100644
index 312fd1f..0000000
--- a/src/consolidation.pyc
+++ /dev/null
Binary files differ
diff --git a/src/consolidation_run b/src/consolidation_run
index 6de6ef4..6b41880 100755
--- a/src/consolidation_run
+++ b/src/consolidation_run
@@ -30,23 +30,17 @@ parser.add_argument('--log_path',required=True)
args = parser.parse_args()
-"""
try:
db = DB_Stats (args.db_name, args.db_user, args.db_pass)
db.create()
except Exception as e:
- print ("Creating DB: {}".format (str(e)
+ print ("Creating DB: {}".format (str(e)))
try:
con = Consolidation(args.rrd_path, db)
con.process_rrds()
except Exception as e:
- print ("Processing rrd file: {}".format(str(e)
-"""
-db = DB_Stats (args.db_name, args.db_user, args.db_pass)
-db.create()
+ print ("Processing rrd file: {}".format(str(e)))
-con = Consolidation(args.rrd_path, db)
-con.process_rrds()
diff --git a/src/db.py b/src/db.py
deleted file mode 100644
index a8939e5..0000000
--- a/src/db.py
+++ /dev/null
@@ -1,202 +0,0 @@
-from __future__ import print_function
-import mysql.connector
-from mysql.connector import errorcode
-from datetime import datetime
-
-class DB_Stats:
- TABLES={}
-
- TABLES['Usages'] = (
- "CREATE TABLE `Usages` ("
- " `ts` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,"
- " `user_hash` CHAR(40) NOT NULL,"
- " `resource_name` CHAR(80),"
- " `start_date` TIMESTAMP NOT NULL,"
- " `data_type` CHAR (30) NOT NULL,"
- " `data` INTEGER NOT NULL,"
- " PRIMARY KEY (`user_hash`,`start_date`,`resource_name`, `data_type`)"
- " )")
-
- TABLES['Resources'] = (
- "CREATE TABLE Resources ("
- " `name` CHAR(250),"
- " PRIMARY KEY (name)"
- " )")
-
- TABLES['Users'] = (
- "CREATE TABLE Users("
- " `hash` CHAR (40) NOT NULL,"
- " `uuid` CHAR (32) NOT NULL,"
- " `machine_sn` CHAR(80),"
- " `age` INTEGER NOT NULL,"
- " `school` CHAR(80),"
- " `sw_version` CHAR (80),"
- " `ts` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,"
- " PRIMARY KEY (hash)"
- " )")
-
- TABLES['Runs'] = (
- "CREATE TABLE Runs("
- " `last_ts` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP "
- ")")
-
-
-
- def __init__(self, db_name, user, password):
- self.db_name = db_name
- self.user = user
- self.password = password
-
-
- def create_database(self, cursor):
- try:
- cursor.execute(
- "CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(self.db_name))
- except mysql.connector.Error as err:
- raise Exception ("Failed creating database: {}".format(err))
-
- def create_tables(self, cursor):
- for name, ddl in self.TABLES.iteritems():
- try:
- print("Creating table {}: ".format(name), end='')
- cursor.execute(ddl)
- except mysql.connector.Error as err:
- if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
- print("already exists.")
- else:
- raise Exception ("Error: {}".format(err))
- else:
- print("OK")
-
- def create (self):
- self.cnx = mysql.connector.connect(user=self.user, password=self.password)
- cursor = self.cnx.cursor()
- """Try connect to db """
- try:
- self.cnx.database = self.db_name
- print("DB ["+self.db_name+"] created already, will try create tables:" )
- self.create_tables(cursor)
- except mysql.connector.Error as err:
- """If db not exist, then create"""
- if err.errno == errorcode.ER_BAD_DB_ERROR:
- self.create_database(cursor)
- self.cnx.database = self.db_name
- self.create_tables(cursor)
- else:
- raise Exception ("Error: {}".format(err))
- cursor.close()
-
-
-
- def close (self):
- self.cnx.close()
-
-
-
- def store_activity_uptime(self, rrd):
-
- self.store_resource(rrd.get_name())
- self.store_user(rrd)
-
- cursor = self.cnx.cursor()
- insert = ("INSERT INTO Usages "
- "(user_hash, "
- "resource_name, "
- "start_date, "
- "data_type, "
- "data) "
- "VALUES (%s, %s, %s, %s ,%s) ")
-
- for d in rrd.get_uptime_by_interval():
- info = (rrd.get_user_hash(), rrd.get_name() , datetime.fromtimestamp(float(d[0])), 'uptime', d[1])
- try:
- cursor.execute(insert, info)
- if self.update_last_record(rrd.get_date_last_record()) == 0:
- self.cnx.commit()
-
- except mysql.connector.Error as err:
- print("Fail {}: {}".format(cursor.statement, err))
- cursor.close()
-
-
- def store_resource(self, resource_name):
- cursor = self.cnx.cursor()
- op = ("SELECT name FROM Resources WHERE name = %s")
- params = (resource_name,)
- try:
- cursor.execute(op, params)
- result = cursor.fetchone()
- if result != None:
- print("Resource {} already in db".format(resource_name))
- else:
- insert = ("INSERT INTO Resources (name) VALUES (%s)")
- info = (resource_name, )
- cursor.execute(insert, info)
- self.cnx.commit()
- except mysql.connector.Error as err:
- print("Fail {}: {}".format(cursor.statement, err))
-
- cursor.close()
-
- def store_user (self, rrd):
- cursor = self.cnx.cursor()
- op = ("SELECT hash FROM Users WHERE hash = %s")
- params = (rrd.get_user_hash(), )
- try:
- cursor.execute(op, params)
- result = cursor.fetchone()
- if result != None:
- print("User {} already in db".format(rrd.user_hash))
- else:
- """FIXME change hardcoded values """
- insert = ("INSERT INTO Users (hash, uuid, machine_sn, age, school, sw_version) VALUES (%s, %s, %s, %s, %s, %s)")
- params = (rrd.get_user_hash(), rrd.get_uuid(), "unk_machine_sn", 0, "unk_escuela", "1.0.0")
- cursor.execute(insert, params)
- self.cnx.commit()
- except mysql.connector.Error as err:
- print("Fail {}: {}".format(cursor.statement, err))
-
- cursor.close()
-
-
-
- def update_last_record (self, ts):
- cursor = self.cnx.cursor()
- res = 0
- op = ("SELECT * FROM Runs")
- params = (datetime.fromtimestamp(float(ts)),)
- try:
- cursor.execute(op)
- result = cursor.fetchone()
-
- if result != None:
- op = ("UPDATE Runs SET last_ts = %s")
- cursor.execute(op, params)
- self.cnx.commit()
- else:
- op = ("INSERT INTO Runs VALUES(%s)")
- cursor.execute(op, params)
- self.cnx.commit()
-
- except mysql.connector.Error as err:
- print("Fail {}: {}".format(cursor.statement, err))
- res = -1
-
- cursor.close()
- return res
-
- def get_date_last_record (self):
- cursor = self.cnx.cursor()
- op = ("SELECT UNIX_TIMESTAMP ((SELECT last_ts FROM Runs))")
- try:
- cursor.execute(op)
- result = cursor.fetchone()
- if result != None:
- print ("last record: {}".format(result[0]))
- return result[0]
- else:
- print ("Last date record is None")
- return 0
- except mysql.connector.Error as err:
- print("Fail {}: {}".format(cursor.statement, err))
- cursor.close()
diff --git a/src/db.pyc b/src/db.pyc
deleted file mode 100644
index 2d3fcf7..0000000
--- a/src/db.pyc
+++ /dev/null
Binary files differ
diff --git a/src/rrd_files.py b/src/rrd_files.py
deleted file mode 100644
index a437e0d..0000000
--- a/src/rrd_files.py
+++ /dev/null
@@ -1,133 +0,0 @@
-import rrdtool
-import os
-import sys
-
-class RRD:
-
- hdr_item = 0
- ds_item = 1
- data_item = 2
- DS = {'active':0, 'buddies':0, 'instances':0, 'new':0, 'resumed':0, 'uptime':0}
-
- def __init__(self, path, name, date_start, date_end):
-
- self.rrd_name = name
-
- if date_start == None:
- self.date_start = str(rrdtool.first(str(os.path.join (path,name))))
- else:
- self.date_start = str(date_start)
-
-
- if date_end == None:
- self.date_end = str(rrdtool.last(str(os.path.join(path, name))))
- else:
- self.date_end = str(date_end)
-
- self.user_hash = os.path.split(path)[1]
-
- self.user_path = os.path.join (
- self.get_first_part_path(path, 3),
- "users",
- "user",
- self.user_hash[:2],
- self.user_hash
- )
-
- self.uuid = self.get_uuid_from_file(self.user_path)
-
-
- print "*******************************************"
- print " RRD "
- print "start: " + self.date_start
- print "end: " + self.date_end
- print "PATH: " + path
- print "RRD NAME: " + name
- print "\n"
- try:
- self.rrd = rrdtool.fetch (str(os.path.join(path,name)), 'AVERAGE', '-r 60', '-s '+ self.date_start, '-e '+self.date_end)
- except:
- raise
-
- print " DS "
- for item in self.DS.keys():
- idx = self.get_ds_index (item)
- if idx != -1:
- self.DS[item] = idx
- print "DS "+ item + ": " + str(self.DS[item])
- else:
- print "DS "+ item + " not found in header"
- print "***********************************************"
-
- def get_ds_index(self, ds):
- i=0
- for i in range (len (self.rrd[self.ds_item])):
- if self.rrd[self.ds_item][i] == ds:
- return i
- i=+1
- return -1
-
- def get_uptime_by_interval (self):
- ds_name = "uptime"
- res=list()
-
- print "-------Calcule "+ ds_name +"-------"
- i=0
- found = False
- while i < len(self.rrd[self.data_item]):
- value = str(self.rrd[self.data_item][i][self.DS[ds_name]])
- if value != "None":
- uptime = value
- end = str (long(self.date_start) + ((i+1) * 60))
- if found == False:
- found = True
- start = str (long (self.date_start) + ((i+1) * 60))
- else:
- if found:
- print start + "->" + end + ": " + uptime
- if float(uptime) > 0:
- res.append((start, uptime))
- found = False
- i=i+1
- return res
- print "---------------------------------------------------"
-
-
- def get_name(self):
- return self.rrd_name.partition(".rrd")[0]
-
- def show_valid_ds(self, ds_name):
- print "------------------- DS "+ ds_name +"---------------------"
- i=0
- while i < len(self.rrd[self.data_item]):
- timestamp = str (long (self.date_start) + ((i+1) * 60))
- value = str (self.rrd[self.data_item][i][self.DS[ds_name]])
-
- if value != "None":
- print timestamp+ ": " + value
- i=i+1
- print "---------------------------------------------------"
-
-
- def get_date_last_record(self):
- return self.date_end
-
- def set_user_hash(self, u_hash):
- self.user_hash = u_hash
-
- def get_first_part_path (self, path, idx):
- l=list()
- l.append(path)
- for i in range (idx):
- l.append(os.path.split(l[i])[0])
- return l[idx]
-
- def get_uuid_from_file(self,path):
- return open (os.path.join(path, "machine_uuid")).next()
-
-
- def get_user_hash(self):
- return self.user_hash
-
- def get_uuid (self):
- return self.uuid
diff --git a/src/rrd_files.pyc b/src/rrd_files.pyc
deleted file mode 100644
index 7f26699..0000000
--- a/src/rrd_files.pyc
+++ /dev/null
Binary files differ
diff --git a/src/stats_consolidation/consolidation.py b/src/stats_consolidation/consolidation.py
index 01d8b84..a1fd79a 100644
--- a/src/stats_consolidation/consolidation.py
+++ b/src/stats_consolidation/consolidation.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
import os
import argparse
@@ -14,22 +15,24 @@ class Consolidation:
self.db = db
def process_rrds (self):
id_hash_list = os.listdir(unicode(self.base_path))
- if id_hash_list:
- for id_hash in id_hash_list:
- user_hash_list = os.listdir( unicode( os.path.join(self.base_path, id_hash) ) )
- if user_hash_list:
- for user_hash in user_hash_list:
- rrd_list = os.listdir( unicode(os.path.join(self.base_path, id_hash, user_hash)) )
- if rrd_list:
- for rrd in rrd_list:
- rrd_path = unicode (os.path.join(self.base_path, id_hash, user_hash) )
- rrd_obj = RRD (path=rrd_path, name=rrd, date_start=self.date_start, date_end=None)
- self.db.store_activity_uptime(rrd_obj)
- else:
- print "None rrd file found" + os.path.join(self.base_path, id_hash, user_hash)
- else:
- print "None hash user found on: " + os.path.join(self.base_path, id_hash)
- else:
- print "None hash ids found on: " + self.base_path
-
+ try:
+ if id_hash_list:
+ for id_hash in id_hash_list:
+ user_hash_list = os.listdir( unicode( os.path.join(self.base_path, id_hash) ) )
+ if user_hash_list:
+ for user_hash in user_hash_list:
+ rrd_list = os.listdir( unicode(os.path.join(self.base_path, id_hash, user_hash)) )
+ if rrd_list:
+ for rrd in rrd_list:
+ rrd_path = unicode (os.path.join(self.base_path, id_hash, user_hash) )
+ rrd_obj = RRD (path=rrd_path, name=rrd, date_start=self.date_start, date_end=None)
+ self.db.store_activity_uptime(rrd_obj)
+ else:
+ print ("RRD file not found: {}".format(os.path.join(self.base_path, id_hash, user_hash)))
+ else:
+ print ("None hash user found on: {}".format(os.path.join(self.base_path, id_hash)))
+ else:
+ print ("None hash ids found on: {}".format(self.base_path))
+ except Exception as e:
+ print ("Error: {}".format(str(e)))
diff --git a/src/stats_consolidation/db.py b/src/stats_consolidation/db.py
index ddc1006..68444f2 100644
--- a/src/stats_consolidation/db.py
+++ b/src/stats_consolidation/db.py
@@ -90,6 +90,7 @@ class DB_Stats:
raise Exception ("Error: {}".format(err))
cursor.close()
+
def close (self):
@@ -204,3 +205,50 @@ class DB_Stats:
except mysql.connector.Error as err:
print("Fail {}: {}".format(cursor.statement, err))
cursor.close()
+
+
+ def connect (self):
+ print ("Try connect to db")
+ try:
+ self.cnx = mysql.connector.connect(user=self.user, password=self.password)
+ cursor = self.cnx.cursor()
+ self.cnx.database = self.db_name
+ cursor.close()
+ except mysql.connector.Error as err:
+ print("CONNECT FAIL {}".format (err))
+
+ def most_activity_used (self):
+ uptime_last=0
+ try:
+ cursor1 = self.cnx.cursor()
+ cursor2 = self.cnx.cursor()
+ cursor1.execute("SELECT name FROM Resources")
+
+ rows = cursor1.fetchall()
+ for name in rows:
+ if (name[0] != 'system') and (name[0] != 'journal') and (name[0] != 'network') and (name[0] != 'shell'):
+ cursor2.execute ("SELECT SUM(data) FROM Usages WHERE resource_name = %s", (name[0],))
+ uptime = cursor2.fetchone()
+ if uptime[0] > uptime_last:
+ uptime_last= uptime[0]
+ activity_name = name[0]
+ except mysql.connector.Error as err:
+ print("Fail {}:".format(err))
+ except:
+ print("most_activity_used Fail ")
+
+ cursor1.close()
+ cursor2.close()
+ return (activity_name, uptime_last)
+
+ def frequency_usage(self):
+ cursor = self.cnx.cursor()
+ try:
+ cursor.execute("SELECT SUM(data) FROM Usages WHERE resource_name = 'system'")
+ res = cursor.fetchone()
+ except mysql.connector.Error as err:
+ print("frequency_usage fail: {}".format(err))
+ cursor.close()
+
+ return res
+
diff --git a/src/stats_consolidation/rrd_files.py b/src/stats_consolidation/rrd_files.py
index a437e0d..c5561d7 100644
--- a/src/stats_consolidation/rrd_files.py
+++ b/src/stats_consolidation/rrd_files.py
@@ -47,7 +47,7 @@ class RRD:
try:
self.rrd = rrdtool.fetch (str(os.path.join(path,name)), 'AVERAGE', '-r 60', '-s '+ self.date_start, '-e '+self.date_end)
except:
- raise
+ raise Exception("rrdtool.fetch FAIL")
print " DS "
for item in self.DS.keys():
diff --git a/src/test_cons.py b/src/test_cons.py
deleted file mode 100644
index c91ed42..0000000
--- a/src/test_cons.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import consolidation
-from consolidation import *
-
-db = DB_Stats('statistics', 'root', 'gustavo')
-db.create();
-
-con = Consolidation('/home/gustavo/AC/server_stats/sugar-stats/rrd', db)
-
-con.process_rrds()
diff --git a/src/test_db.py b/src/test_db.py
deleted file mode 100644
index 7c111e3..0000000
--- a/src/test_db.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import db
-from db import *
-
-db = DB_Stats('statistics', 'root', 'gustavo')
-db.create();
-
diff --git a/src/test_rrd.py b/src/test_rrd.py
deleted file mode 100644
index 0fb2028..0000000
--- a/src/test_rrd.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import rrd_files
-import db
-
-from rrd_files import *
-from db import *
-
-
-print "============================== TEST RRD -> Relational DB ========================================"
-db = DB_Stats('statistics', 'root', 'gustavo')
-db.create()
-
-DATE_START =datetime(year=2012,
- month=12,
- day=13,
- hour=0,
- minute=0,
- second=0).strftime("%s")
-
-
-DATE_END = datetime(year=2012,
- month=12,
- day=14,
- hour=0,
- minute=0,
- second=0).strftime("%s")
-
-DATE_START = db.get_date_last_record()
-DATE_END = datetime.now().strftime("%s")
-
-act_rrd = RRD (path = "/home/gustavo/AC/consolidation/rrds", name="pippy.rrd", date_start=DATE_START, date_end=DATE_END)
-"""
-act_rrd.show_valid_ds("uptime")
-act_rrd.show_valid_ds("resumed")
-act_rrd.show_valid_ds("new")
-act_rrd.show_valid_ds("instances")
-act_rrd.show_valid_ds("buddies")
-"""
-data = {}
-db.store_activity_uptime(act_rrd)