prm.py
# coding: utf-8
# only testing modules
from faker import Faker
fake = Faker()
# todo : remove, testing modules
import sqlite3
from collections import namedtuple
import shutil
# cPickle is faster where available; fall back to pickle.
# alias as pickle so the rest of the module works either way
try:
    import cPickle as pickle
    print 'using cPickle'
except ImportError:
    import pickle
    print 'using pickle'
from os.path import isfile, getsize
'''
Resource - namedtuple
This is not supposed to be an optimised format; it attempts to give a level of flexibility rather than speed or compactness.
'''
'''
where #555 appears in the comments, i am happy with the result
'''
Resource = namedtuple('Resource', ['id', 'resid', 'key', 'ord', 'value', 'value1', 'data', 'pickled'])
Resource.__new__.__defaults__ = (None, 0, 'MISSING', -1, None, 0, None, False)
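# illustrative sketch of the defaults above; an empty Resource() evaluates to:
#   Resource(id=None, resid=0, key='MISSING', ord=-1,
#            value=None, value1=0, data=None, pickled=False)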
tb_def = Resource(id = 'INTEGER PRIMARY KEY', resid = 'INTEGER UNIQUE', key = 'TEXT', ord = 'INTEGER', value = 'INTEGER', value1 = 'TEXT', data = 'TEXT', pickled = 'INTEGER')
'''
insert_pattern generates something like (?,?,?,?,?,?,?,?),
a placeholder for each field in Resource
'''
insert_pattern = '({0})'.format(','.join('?' * len(Resource._fields)))
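# e.g. with the 8 fields of Resource this evaluates to '(?,?,?,?,?,?,?,?)'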
res_types = ['RES', 'STR', 'TEXT', 'PICT', 'LIST', 'COLOURS', 'TUPLE', 'ROOM', 'IVAN', 'PYUI']
'''
wasteful sqlite3 database table definition. However, just looking for maximum flexibility; this is only an app utility, not looking to store huge amounts of data efficiently. Trying to use one table layout to achieve many types of storage without having table-specific code. Maybe a bad idea, we will see.
The table fields are generated from tb_def, which is a namedtuple of type Resource. Each field's name is used as the column name, and the field's value supplies the column's attributes.
'''
_table_sql = '''CREATE TABLE IF NOT EXISTS '{0}' ({1})'''.format('{0}', ', '.join(tb_def._fields[i] + ' ' + item for i, item in enumerate(tb_def)))
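# sketch of the SQL this produces, e.g. _table_sql.format('STR'):
#   CREATE TABLE IF NOT EXISTS 'STR' (id INTEGER PRIMARY KEY, resid INTEGER UNIQUE,
#   key TEXT, ord INTEGER, value INTEGER, value1 TEXT, data TEXT, pickled INTEGER)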
_insert_sql = ''' INSERT INTO {0} VALUES ''' + insert_pattern
_select_sql = ''' SELECT * FROM {0} WHERE resid = ? '''
_drop_table_sql = ''' DROP TABLE IF EXISTS {0} '''
_table_info_sql = ''' SELECT COUNT(*) FROM {0} '''
_table_exists_sql = ''' SELECT name FROM sqlite_master WHERE type='table' AND name='{0}'; '''
_table_list_sql = ''' SELECT name FROM sqlite_master where type = 'table' '''
_table_select_max_sql = ''' select max({0}) from {1} '''
def db_backup(db_file_name, bak_ext = 'bak'):
    # copy the database file to <base>_<bak_ext>.db alongside the original
    base = db_file_name.split('.')[0]
    new_file_name = '{0}_{1}.db'.format(base, bak_ext)
    # let shutil's IOError propagate rather than masking it with a bare except
    shutil.copyfile(db_file_name, new_file_name)
def new_resource(type, resid, key, value):
    # shortcut to create a basic resource (the type arg is currently unused)
    return Resource(id = None, resid = resid, key = key, value = value, value1 = None, ord = 0, data = None, pickled = False)
def key_pair_resource(type, key, value):
    # shortcut to create a key/value pair resource
    return Resource(id = None, resid = 0, key = key, value = value, value1 = None, ord = 0, data = None, pickled = False)
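# illustrative example (hypothetical values):
#   key_pair_resource('STR', 'LANG', 'English')
#   -> Resource(id=None, resid=0, key='LANG', ord=0, value='English',
#      value1=None, data=None, pickled=False)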
def namedtuple_factory(cursor, row):
    """
    Usage:
    con.row_factory = namedtuple_factory
    """
    fields = [col[0] for col in cursor.description]
    Row = namedtuple("Row", fields)
    r = Row(*row)
    # namedtuples are immutable, so use _replace rather than assignment
    if r.pickled and r.data:
        r = r._replace(data = pickle.loads(r.data))
    return r
def isSQLite3(filename):
    '''
    try to determine whether filename is a valid sqlite3 database. If the file does not exist, this also returns False.
    copied this code from Stack Overflow
    '''
    if not isfile(filename):
        return False
    if getsize(filename) < 100:
        # SQLite database file header is 100 bytes
        return False
    with open(filename, 'rb') as fd:
        header = fd.read(100)
    # i don't think this is future proof, have to think about this
    # todo
    return header[:16] == 'SQLite format 3\x00'
class PersonalResourceManager (object):
    '''
    a small personal resource manager built on top of sqlite3
    '''
    def __init__(self, db_filename):
        self.dbfile = db_filename
        self.success = False
        self.connection = None
        self.progress_handler = None
        self.progress_intervals = 0
        # always commit
        self.auto_commit_flag = True
        # if we can determine we have a valid sqlite3 db,
        # avoid opening it just to create it, etc...
        if not isSQLite3(self.dbfile):
            conn = self.__dbconn()
            conn.close()
            self.success = True
        # todo: refactor; modify isSQLite3 to only determine whether the
        # file is a valid sqlite3 db...
        self.connection = self.__dbconn()
    @property
    def auto_commit(self):
        return self.auto_commit_flag
    @auto_commit.setter
    def auto_commit(self, value):
        self.auto_commit_flag = value
    def __dbconn(self):
        '''
        The only method used to create a database connection.
        need to beef up the error checking/handling here
        '''
        self.success = False
        try:
            conn = sqlite3.connect(self.dbfile)
            conn.set_progress_handler(self.progress_handler, self.progress_intervals)
        except Exception as ex:
            print ex
            return None
        self.success = True
        return conn
    def __connect(self):
        '''
        always get a connection object via this method. Want to always use the context manager. This is not supposed to be a high-speed database; it is supposed to be convenient and safe to use.
        '''
        if not self.connection:
            self.connection = self.__dbconn()
        '''
        not sure whether resetting the row factory to None each time a connection is requested is smart or not. For the moment i do.
        '''
        self.connection.row_factory = None
        return self.connection
    def db_progress_handler(self, callback, n = 10):
        # note: takes effect the next time a connection is created
        self.progress_handler = callback
        self.progress_intervals = n
    def table_info(self, type):
        # return the number of rows in the table
        with self.__connect() as conn:
            return conn.execute(_table_info_sql.format(type)).fetchone()[0]
    def table_list(self):
        '''
        return a list of table names from sqlite_master (rows where type == 'table')
        #555
        '''
        conn = self.__connect()
        return [str(tbl[0]) for tbl in conn.execute(_table_list_sql)]
    def add_table(self, tbs):
        # add one or more tables
        # wrap a single (non-list) table name in a list
        if not isinstance(tbs, list):
            tbs = [tbs]
        conn = self.__connect()
        for tb in tbs:
            conn.execute(_table_sql.format(tb))
    def drop_table(self, type):
        # remove a table
        conn = self.__connect()
        conn.execute(_drop_table_sql.format(type))
    def table_exists(self, type):
        conn = self.__connect()
        return conn.execute(_table_exists_sql.format(type)).fetchone() is not None
    def add_entry(self, type, resid, key, ord, value, value1, data, pickled = False):
        pickle_code = False
        sql = _insert_sql.format(type)
        if pickled:
            data = pickle.dumps(data)
            pickle_code = True
        # if resid == 0, select the max + 1 for the ID
        if resid == 0:
            resid = self.get_max(type, 'id') + 1
        if ord == 0:
            ord = self.get_max(type, 'ord') + 1
        conn = self.__connect()
        conn.execute(sql, (None, resid, key, ord, value, value1, data, pickle_code))
        if self.auto_commit_flag:
            conn.commit()
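    # sketch of storing a pickled python object via add_entry (hypothetical
    # values; assumes the 'LIST' table exists):
    #   prm.add_entry('LIST', 0, 'colours', 0, None, None,
    #                 ['red', 'green', 'blue'], pickled = True)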
    def add_resource(self, type, res, commit = False):
        # if we don't provide a resid, get the max num and add 1
        resid = res.resid if res.resid != 0 else self.get_max(type, 'resid') + 1
        data = res.data if not res.pickled else pickle.dumps(res.data)
        ord = res.ord
        '''
        if ord == 0:
            ord = self.get_max(type, 'ord') + 1
        '''
        sql = _insert_sql.format(type)
        conn = self.__connect()
        conn.execute(sql, (None, resid, res.key, ord, res.value, res.value1, data, res.pickled))
        if commit:
            conn.commit()
    def add_record(self, type, res):
        pass
    def get_entry_ID(self, type, resid):
        conn = self.__connect()
        conn.row_factory = namedtuple_factory
        return conn.execute(_select_sql.format(type), (resid,)).fetchone()
    def get_max(self, type, fld):
        conn = self.__connect()
        sql = _table_select_max_sql.format(fld, type)
        return conn.execute(sql).fetchone()[0] or 0
    def get_table_entries(self, type):
        conn = self.__connect()
        conn.row_factory = namedtuple_factory
        sql = ''' Select * from {0} order by resid '''.format(type)
        for row in conn.execute(sql):
            yield row
    def get_table_entries_list(self, type):
        conn = self.__connect()
        conn.row_factory = namedtuple_factory
        sql = ''' Select data, pickled from {0} order by resid '''.format(type)
        return list(conn.execute(sql))
    def shrink_database(self):
        conn = self.__connect()
        conn.execute('VACUUM')
    def fix_database_lock(self):
        conn = self.__connect()
        conn.commit()
        conn.close()
    def commit(self):
        '''
        a manual commit method. You can do multiple single inserts/updates and, if the auto_commit_flag is False, you should call this method afterwards, otherwise the prior operations will not be written to disk.
        trying to provide an efficient way to add/update many records in the database with single calls.
        '''
        if self.connection:
            self.connection.commit()
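    # sketch of batched inserts followed by a single manual commit (assumes a
    # prm instance and an iterable of Resource objects called resources):
    #   for res in resources:
    #       prm.add_resource('STR', res)   # commit defaults to False
    #   prm.commit()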
    def close(self):
        '''
        if there is a connection, do a commit, then close the connection
        '''
        if self.connection:
            self.connection.commit()
            self.connection.close()
            self.connection = None
    # context manager methods...
    def __enter__(self):
        self.__connect()
        print 'Opened Database....'
        # return self so 'with prm as p:' also works
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.commit()
        self.connection.close()
        self.connection = None
        print 'Database Closing...'
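    # usage sketch for the context manager (commits and closes on exit;
    # hypothetical values):
    #   with prm:
    #       prm.add_resource('STR', new_resource('STR', 0, 'LANG', 'English'))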
def create_random_str_recs(prm, recs):
    print 'starting to add {0} records'.format(recs)
    for i in range(recs):
        res = key_pair_resource('IVAN', i + 1000, fake.name())
        prm.add_resource('IVAN', res)
    print '{0} records added'.format(recs)
    print 'committing {0} records'.format(recs)
    prm.commit()
    print '{0} records committed'.format(recs)
def db_progress():
    print 'in database progress'
import timeit
if __name__ == '__main__':
    db_file_name = 'junk.db'
    recs_to_add = 5000
    print 'is sqlite3 Db', isSQLite3(db_file_name)
    prm = PersonalResourceManager(db_file_name)
    #prm.db_progress_handler(db_progress)
    #prm.fix_database_lock()
    print 'db success =', prm.success
    prm.add_table(res_types)
    #timeit.timeit('create_random_str_recs(prm, 100)')
    create_random_str_recs(prm, recs_to_add)
    #print prm.get_last_ID('STR')
    for x in range(1, 5):
        print 'adding res'
        res = new_resource('STR', 0, 'LANG', fake.name())
        prm.add_resource('STR', res, True)
    res = key_pair_resource('LIST', 101, fake.address())
    prm.add_resource('LIST', res)
    id = prm.get_max('STR', 'id')
    prm.add_table(['IVAN', 'JOHNNY', 'JAN', 'Bjarne'])
    #prm.drop_table('KENNY')
    print 'table list', prm.table_list()
    mytbl = 'LIST'
    print 'table exists:', mytbl, '=', prm.table_exists(mytbl)
    print 'table info', prm.table_info('TEXT')
    print 'get max', prm.get_max('STR', 'ord')
    '''
    print 'shrinking Database'
    prm.shrink_database()
    print 'Finished shrinking Database...'
    '''
    db_backup(db_file_name)
    prm.drop_table('ROOM')
    prm.add_table('666')
    '''
    with prm:
        for row in prm.get_table_entries('LIST'):
            print row
    with prm:
        print prm.get_table_entries_list('TUPLE')
    x = Resource(0, 'test', 1, 2, (1, 2, 3), 1)
    print 'named tuple', x
    print x._fields
    '''
    print 'table definition \n'
    for i, item in enumerate(tb_def):
        print tb_def._fields[i], '=', item