Now that CherryPy session locking is gone, locking is performed in controller.Database instead, but only for the SQLite backend.
This commit is contained in:
parent
adcb9611c5
commit
f1d814cfd6
|
@ -5,6 +5,7 @@ import sys
|
||||||
import sha
|
import sha
|
||||||
import cherrypy
|
import cherrypy
|
||||||
import random
|
import random
|
||||||
|
import threading
|
||||||
from model.Persistent import Persistent
|
from model.Persistent import Persistent
|
||||||
from model.Notebook import Notebook
|
from model.Notebook import Notebook
|
||||||
|
|
||||||
|
@ -18,6 +19,20 @@ class Connection_wrapper( object ):
|
||||||
return getattr( self.connection, name )
|
return getattr( self.connection, name )
|
||||||
|
|
||||||
|
|
||||||
|
def synchronized( method ):
  """
  Decorator that serializes calls to the wrapped method using self.lock.

  If self.lock is falsy (e.g. None for the PostgreSQL backend, which does its
  own synchronization), the method is invoked without any locking.

  @type method: function
  @param method: bound method to wrap; its object must have a "lock" attribute
                 that is either a threading.Lock or None
  @rtype: function
  @return: wrapped method that acquires self.lock around the call
  """
  from functools import wraps

  @wraps( method )
  def lock( self, *args, **kwargs ):
    # Capture the lock object once, so the exact lock that is acquired is the
    # one that gets released, even if self.lock is reassigned mid-call. (The
    # original re-read self.lock in the finally clause, which could leak an
    # acquired lock if the attribute changed during the method call.)
    current_lock = self.lock

    if current_lock:
      current_lock.acquire()

    try:
      return method( self, *args, **kwargs )
    finally:
      if current_lock:
        current_lock.release()

  return lock
|
||||||
|
|
||||||
|
|
||||||
class Database( object ):
|
class Database( object ):
|
||||||
ID_BITS = 128 # number of bits within an id
|
ID_BITS = 128 # number of bits within an id
|
||||||
ID_DIGITS = "0123456789abcdefghijklmnopqrstuvwxyz"
|
ID_DIGITS = "0123456789abcdefghijklmnopqrstuvwxyz"
|
||||||
|
@ -93,9 +108,10 @@ class Database( object ):
|
||||||
|
|
||||||
self.__connection = \
|
self.__connection = \
|
||||||
Connection_wrapper( sqlite.connect( data_filename, detect_types = sqlite.PARSE_DECLTYPES, check_same_thread = False ) )
|
Connection_wrapper( sqlite.connect( data_filename, detect_types = sqlite.PARSE_DECLTYPES, check_same_thread = False ) )
|
||||||
|
|
||||||
self.__pool = None
|
self.__pool = None
|
||||||
self.__backend = Persistent.SQLITE_BACKEND
|
self.__backend = Persistent.SQLITE_BACKEND
|
||||||
|
self.lock = threading.Lock() # multiple simultaneous client threads make SQLite angry
|
||||||
else:
|
else:
|
||||||
import psycopg2 as psycopg
|
import psycopg2 as psycopg
|
||||||
from psycopg2.pool import PersistentConnectionPool
|
from psycopg2.pool import PersistentConnectionPool
|
||||||
|
@ -125,6 +141,7 @@ class Database( object ):
|
||||||
)
|
)
|
||||||
|
|
||||||
self.__backend = Persistent.POSTGRESQL_BACKEND
|
self.__backend = Persistent.POSTGRESQL_BACKEND
|
||||||
|
self.lock = None # PostgreSQL does its own synchronization
|
||||||
|
|
||||||
self.__cache = cache
|
self.__cache = cache
|
||||||
|
|
||||||
|
@ -151,6 +168,7 @@ class Database( object ):
|
||||||
except ImportError:
|
except ImportError:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def save( self, obj, commit = True ):
|
def save( self, obj, commit = True ):
|
||||||
"""
|
"""
|
||||||
Save the given object to the database.
|
Save the given object to the database.
|
||||||
|
@ -182,6 +200,7 @@ class Database( object ):
|
||||||
# no commit yet, so don't touch the cache
|
# no commit yet, so don't touch the cache
|
||||||
connection.pending_saves.append( obj )
|
connection.pending_saves.append( obj )
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def commit( self ):
|
def commit( self ):
|
||||||
connection = self.__get_connection()
|
connection = self.__get_connection()
|
||||||
connection.commit()
|
connection.commit()
|
||||||
|
@ -195,6 +214,7 @@ class Database( object ):
|
||||||
|
|
||||||
connection.pending_saves = []
|
connection.pending_saves = []
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def rollback( self ):
|
def rollback( self ):
|
||||||
connection = self.__get_connection()
|
connection = self.__get_connection()
|
||||||
connection.rollback()
|
connection.rollback()
|
||||||
|
@ -230,6 +250,7 @@ class Database( object ):
|
||||||
|
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def select_one( self, Object_type, sql_command, use_cache = False ):
|
def select_one( self, Object_type, sql_command, use_cache = False ):
|
||||||
"""
|
"""
|
||||||
Execute the given sql_command and return its results in the form of an object of Object_type,
|
Execute the given sql_command and return its results in the form of an object of Object_type,
|
||||||
|
@ -274,6 +295,7 @@ class Database( object ):
|
||||||
|
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def select_many( self, Object_type, sql_command ):
|
def select_many( self, Object_type, sql_command ):
|
||||||
"""
|
"""
|
||||||
Execute the given sql_command and return its results in the form of a list of objects of
|
Execute the given sql_command and return its results in the form of a list of objects of
|
||||||
|
@ -311,6 +333,7 @@ class Database( object ):
|
||||||
|
|
||||||
return [ isinstance( item, str ) and unicode( item, encoding = "utf8" ) or item for item in row ]
|
return [ isinstance( item, str ) and unicode( item, encoding = "utf8" ) or item for item in row ]
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def execute( self, sql_command, commit = True ):
|
def execute( self, sql_command, commit = True ):
|
||||||
"""
|
"""
|
||||||
Execute the given sql_command.
|
Execute the given sql_command.
|
||||||
|
@ -328,6 +351,7 @@ class Database( object ):
|
||||||
if commit:
|
if commit:
|
||||||
connection.commit()
|
connection.commit()
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def execute_script( self, sql_commands, commit = True ):
|
def execute_script( self, sql_commands, commit = True ):
|
||||||
"""
|
"""
|
||||||
Execute the given sql_commands.
|
Execute the given sql_commands.
|
||||||
|
@ -374,6 +398,7 @@ class Database( object ):
|
||||||
|
|
||||||
return "".join( digits )
|
return "".join( digits )
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def next_id( self, Object_type, commit = True ):
|
def next_id( self, Object_type, commit = True ):
|
||||||
"""
|
"""
|
||||||
Generate the next available object id and return it.
|
Generate the next available object id and return it.
|
||||||
|
@ -404,6 +429,7 @@ class Database( object ):
|
||||||
|
|
||||||
return next_id
|
return next_id
|
||||||
|
|
||||||
|
@synchronized
|
||||||
def close( self ):
|
def close( self ):
|
||||||
"""
|
"""
|
||||||
Shutdown the database.
|
Shutdown the database.
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
from pytz import utc
|
from pytz import utc
|
||||||
|
from threading import Thread
|
||||||
from pysqlite2 import dbapi2 as sqlite
|
from pysqlite2 import dbapi2 as sqlite
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from Stub_object import Stub_object
|
from Stub_object import Stub_object
|
||||||
|
@ -10,7 +11,7 @@ from controller.Database import Database, Connection_wrapper
|
||||||
class Test_database( object ):
|
class Test_database( object ):
|
||||||
def setUp( self ):
|
def setUp( self ):
|
||||||
# make an in-memory sqlite database to use in place of PostgreSQL during testing
|
# make an in-memory sqlite database to use in place of PostgreSQL during testing
|
||||||
self.connection = Connection_wrapper( sqlite.connect( ":memory:", detect_types = sqlite.PARSE_DECLTYPES ) )
|
self.connection = Connection_wrapper( sqlite.connect( ":memory:", detect_types = sqlite.PARSE_DECLTYPES, check_same_thread = False ) )
|
||||||
self.cache = Stub_cache()
|
self.cache = Stub_cache()
|
||||||
cursor = self.connection.cursor()
|
cursor = self.connection.cursor()
|
||||||
cursor.execute( Stub_object.sql_create_table() )
|
cursor.execute( Stub_object.sql_create_table() )
|
||||||
|
@ -190,5 +191,31 @@ class Test_database( object ):
|
||||||
assert next_id
|
assert next_id
|
||||||
assert self.database.load( Stub_object, next_id )
|
assert self.database.load( Stub_object, next_id )
|
||||||
|
|
||||||
|
def test_synchronize( self ):
  """
  Hammer the database from two threads at once. If Database's locking is
  working, both workers complete without error; without it, SQLite raises on
  concurrent access.
  """
  OBJECT_COUNT = 50

  def make_objects():
    for _ in range( OBJECT_COUNT ):
      object_id = self.database.next_id( Stub_object )
      created = Stub_object( object_id, value = 1 )
      revision_before_save = created.revision

      self.database.execute( created.sql_create() )
      loaded = self.database.load( Stub_object, created.object_id )

      assert loaded.object_id == created.object_id
      assert loaded.revision.replace( tzinfo = utc ) == revision_before_save
      assert loaded.value == created.value

    # one final id generation after the loop, mirroring the workload shape
    object_id = self.database.next_id( Stub_object )

  workers = [ Thread( target = make_objects ) for _ in range( 2 ) ]

  for worker in workers:
    worker.start()

  for worker in workers:
    worker.join()
|
||||||
|
|
||||||
def test_backend( self ):
  """Verify that the test database reports the SQLite backend."""
  backend = self.database.backend
  assert backend == Persistent.SQLITE_BACKEND
|
||||||
|
|
Reference in New Issue