diff --git a/INSTALL b/INSTALL index 599a9d1..a480ad0 100644 --- a/INSTALL +++ b/INSTALL @@ -4,19 +4,17 @@ you shouldn't need if you only want to make a wiki. First, install the prerequisites: - * Python 2.5 + * Python 2.4 * CherryPy 2.2 * PostgreSQL 8.1 * psycopg 2.0 * simplejson 1.3 + * pytz 2006p In Debian GNU/Linux, you can issue the following command to install these packages: - apt-get install python2.5 python-cherrypy postgresql-8.1 python-psycopg2 python-simplejson - -If you're using Debian Etch, see the note below about "psycopg in Debian -Etch". + apt-get install python2.4 python-cherrypy postgresql-8.1 python-psycopg2 python-simplejson python-tz development mode @@ -40,14 +38,15 @@ and set the password to "dev". createuser -S -d -R -P -E luminotes -Initialize the database with the starting schema and basic data: +Initialize the database with the starting schema and default data: - psql -U luminotes postgres -f model/schema.sql - psql -U luminotes postgres -f model/data.sql + createdb -W -U luminotes luminotes + export PYTHONPATH=. + python2.4 tools/initdb.py To start the server in development mode, run: - python2.5 luminotes.py -d + python2.4 luminotes.py -d Connect to the following URL in a web browser running on the same machine: @@ -114,18 +113,20 @@ Restart postgresql so these changes take effect: /etc/init.d/postgresql restart As the PostgreSQL superuser (usually "postgres"), create a new database user -and set the password to "dev". +and set a new password, for instance, "mypassword". createuser -S -d -R -P -E luminotes -Initialize the database with the starting schema and basic data: +Initialize the database with the starting schema and default data: - psql -U luminotes postgres -f model/schema.sql - psql -U luminotes postgres -f model/data.sql + createdb -W -U luminotes luminotes + export PYTHONPATH=. 
+ export PGPASSWORD=mypassword + python2.4 tools/initdb.py Then to actually start the production mode server, run: - python2.5 luminotes.py + python2.4 luminotes.py You should be able to connect to the site at whatever domain you've configured Apache to serve. @@ -137,44 +138,28 @@ Python unit tests If you're interested in running unit tests of the server, install: * nose 0.9.0 + * pysqlite 2.3 In Debian GNU/Linux, you can issue the following command to install this package: - apt-get install python-nose + apt-get install python-nose python-pysqlite2 Then you can run unit tests by running: nosetests -JavaScript unit tests ---------------------- +JavaScript "unit" tests +----------------------- JsUnit is included with Luminotes, so to kick off tests of the client-side JavaScript code, simply run: - python2.5 static/js/test/run_tests.py + python2.4 static/js/test/run_tests.py -The run_tests.py script runs the tests inside browser windows and presumes you -have both Firefox and Internet Explorer 6 installed. Edit run_tests.py if you +The run_tests.py script runs the tests inside browser windows and presumes +that you have both Firefox and Internet Explorer 6 installed, and also that +the Luminotes server is running on the local machine. Edit run_tests.py if you need to specify different paths to the browser binaries or want to test with additional browsers. - - -psycopg in Debian Etch ----------------------- - -As of this writing, Debian Etch does not contain a version of psycopg with -support for Python 2.5. However, the version of psycopg in Debian testing does -support Python 2.5. So you can grab the source for python-psycopg2 from Debian -testing, install the build dependencies (including python2.5-dev), and build -the package yourself on an Etch machine. - -Then, edit /usr/share/python/debian_defaults and move "python2.5" from -"unsupported-versions" to "supported-versions". 
Finally, install the -python-psycopg2 package you've just built, and it should fully support Python -2.5. - -See Debian bug #404355 for more information. Note that it was fixed in -unstable, but not in Etch. diff --git a/controller/Database.py b/controller/Database.py index 04de62a..185e94e 100644 --- a/controller/Database.py +++ b/controller/Database.py @@ -1,219 +1,158 @@ import re -import bsddb +import os +import psycopg2 as psycopg +from psycopg2.pool import PersistentConnectionPool import random -import cPickle -from cStringIO import StringIO -from copy import copy -from model.Persistent import Persistent -from Async import async class Database( object ): ID_BITS = 128 # number of bits within an id ID_DIGITS = "0123456789abcdefghijklmnopqrstuvwxyz" - def __init__( self, scheduler, database_path = None ): + def __init__( self, connection = None ): """ Create a new database and return it. - @type scheduler: Scheduler - @param scheduler: scheduler to use - @type database_path: unicode - @param database_path: path to the database file + @type connection: existing connection object with cursor()/close()/commit() methods, or NoneType + @param connection: database connection to use (optional, defaults to making a connection pool) @rtype: Database - @return: database at the given path + @return: newly constructed Database """ - self.__scheduler = scheduler - self.__env = bsddb.db.DBEnv() - self.__env.open( None, bsddb.db.DB_CREATE | bsddb.db.DB_PRIVATE | bsddb.db.DB_INIT_MPOOL ) - self.__db = bsddb.db.DB( self.__env ) - self.__db.open( database_path, "database", bsddb.db.DB_HASH, bsddb.db.DB_CREATE ) - self.__cache = {} + # This tells PostgreSQL to give us timestamps in UTC. I'd use "set timezone" instead, but that + # makes SQLite angry. 
+ os.putenv( "PGTZ", "UTC" ) - def __persistent_id( self, obj, skip = None ): - # save the object and return its persistent id - if obj != skip and isinstance( obj, Persistent ): - self.__save( obj ) - return obj.object_id + if connection: + self.__connection = connection + self.__pool = None + else: + self.__connection = None + self.__pool = PersistentConnectionPool( + 1, # minimum connections + 50, # maximum connections + "dbname=luminotes user=luminotes password=%s" % os.getenv( "PGPASSWORD", "dev" ), + ) - # returning None indicates that the object should be pickled normally without using a persistent id - return None + def __get_connection( self ): + if self.__connection: + return self.__connection + else: + return self.__pool.getconn() - @async - def save( self, obj, callback = None ): + def save( self, obj, commit = True ): """ - Save the given object to the database, including any objects that it references. + Save the given object to the database. @type obj: Persistent @param obj: object to save - @type callback: generator or NoneType - @param callback: generator to wakeup when the save is complete (optional) + @type commit: bool + @param commit: True to automatically commit after the save """ - self.__save( obj ) - yield callback + connection = self.__get_connection() + cursor = connection.cursor() - def __save( self, obj ): - # if this object's current revision is already saved, bail - revision_id = obj.revision_id() - if revision_id in self.__cache: - return + cursor.execute( obj.sql_exists() ) + if cursor.fetchone(): + cursor.execute( obj.sql_update() ) + else: + cursor.execute( obj.sql_create() ) - object_id = unicode( obj.object_id ).encode( "utf8" ) - revision_id = unicode( obj.revision_id() ).encode( "utf8" ) - secondary_id = obj.secondary_id and unicode( obj.full_secondary_id() ).encode( "utf8" ) or None + if commit: + connection.commit() - # update the cache with this saved object - self.__cache[ object_id ] = obj - self.__cache[ revision_id ] = 
copy( obj ) - if secondary_id: - self.__cache[ secondary_id ] = obj + def commit( self ): + self.__get_connection().commit() - # set the pickler up to save persistent ids for every object except for the obj passed in, which - # will be pickled normally - buffer = StringIO() - pickler = cPickle.Pickler( buffer, protocol = -1 ) - pickler.persistent_id = lambda o: self.__persistent_id( o, skip = obj ) - - # pickle the object and write it to the database under both its id key and its revision id key - pickler.dump( obj ) - pickled = buffer.getvalue() - self.__db.put( object_id, pickled ) - self.__db.put( revision_id, pickled ) - - # write the pickled object id (only) to the database under its secondary id - if secondary_id: - buffer = StringIO() - pickler = cPickle.Pickler( buffer, protocol = -1 ) - pickler.persistent_id = lambda o: self.__persistent_id( o ) - pickler.dump( obj ) - self.__db.put( secondary_id, buffer.getvalue() ) - - self.__db.sync() - - @async - def load( self, object_id, callback, revision = None ): + def load( self, Object_type, object_id, revision = None ): """ - Load the object corresponding to the given object id from the database, and yield the provided - callback generator with the loaded object as its argument, or None if the object_id is unknown. - If a revision is provided, a specific revision of the object will be loaded. 
- - @type object_id: unicode - @param object_id: id of the object to load - @type callback: generator - @param callback: generator to send the loaded object to - @type revision: int or NoneType - @param revision: revision of the object to load (optional) - """ - obj = self.__load( object_id, revision ) - yield callback, obj - - def __load( self, object_id, revision = None ): - if revision is not None: - object_id = Persistent.make_revision_id( object_id, revision ) - - object_id = unicode( object_id ).encode( "utf8" ) - - # if the object corresponding to the given id has already been loaded, simply return it without - # loading it again - obj = self.__cache.get( object_id ) - if obj is not None: - return obj - - # grab the object for the given id from the database - buffer = StringIO() - unpickler = cPickle.Unpickler( buffer ) - unpickler.persistent_load = self.__load - - pickled = self.__db.get( object_id ) - if pickled is None or pickled == "": - return None - - buffer.write( pickled ) - buffer.flush() - buffer.seek( 0 ) - - # unpickle the object and update the cache with this saved object - obj = unpickler.load() - if obj is None: - print "error unpickling %s: %s" % ( object_id, pickled ) - return None - self.__cache[ unicode( obj.object_id ).encode( "utf8" ) ] = obj - self.__cache[ unicode( obj.revision_id() ).encode( "utf8" ) ] = copy( obj ) - - return obj - - @async - def reload( self, object_id, callback = None ): - """ - Load and immediately save the object corresponding to the given object id or database key. This - is useful when the object has a __setstate__() method that performs some sort of schema - evolution operation. 
- - @type object_id: unicode - @param object_id: id or key of the object to reload - @type callback: generator or NoneType - @param callback: generator to wakeup when the save is complete (optional) - """ - self.__reload( object_id ) - yield callback - - def __reload( self, object_id, revision = None ): - object_id = unicode( object_id ).encode( "utf8" ) - - # grab the object for the given id from the database - buffer = StringIO() - unpickler = cPickle.Unpickler( buffer ) - unpickler.persistent_load = self.__load - - pickled = self.__db.get( object_id ) - if pickled is None or pickled == "": - return - - buffer.write( pickled ) - buffer.flush() - buffer.seek( 0 ) - - # unpickle the object. this should trigger __setstate__() if the object has such a method - obj = unpickler.load() - if obj is None: - print "error unpickling %s: %s" % ( object_id, pickled ) - return - self.__cache[ object_id ] = obj - - # set the pickler up to save persistent ids for every object except for the obj passed in, which - # will be pickled normally - buffer = StringIO() - pickler = cPickle.Pickler( buffer, protocol = -1 ) - pickler.persistent_id = lambda o: self.__persistent_id( o, skip = obj ) - - # pickle the object and write it to the database under its id key - pickler.dump( obj ) - pickled = buffer.getvalue() - self.__db.put( object_id, pickled ) - - self.__db.sync() - - def size( self, object_id, revision = None ): - """ - Load the object corresponding to the given object id from the database, and return the size of - its pickled data in bytes. If a revision is provided, a specific revision of the object will be + Load the object corresponding to the given object id from the database and return it, or None if + the object_id is unknown. If a revision is provided, a specific revision of the object will be loaded. 
+ @type Object_type: type + @param Object_type: class of the object to load @type object_id: unicode - @param object_id: id of the object whose size should be returned + @param object_id: id of the object to load @type revision: int or NoneType @param revision: revision of the object to load (optional) + @rtype: Object_type or NoneType + @return: loaded object, or None if no match """ - if revision is not None: - object_id = Persistent.make_revision_id( object_id, revision ) + return self.select_one( Object_type, Object_type.sql_load( object_id, revision ) ) - object_id = unicode( object_id ).encode( "utf8" ) + def select_one( self, Object_type, sql_command ): + """ + Execute the given sql_command and return its results in the form of an object of Object_type, + or None if there was no match. - pickled = self.__db.get( object_id ) - if pickled is None or pickled == "": + @type Object_type: type + @param Object_type: class of the object to load + @type sql_command: unicode + @param sql_command: SQL command to execute + @rtype: Object_type or NoneType + @return: loaded object, or None if no match + """ + connection = self.__get_connection() + cursor = connection.cursor() + + cursor.execute( sql_command ) + + row = cursor.fetchone() + if not row: return None - return len( pickled ) + if Object_type in ( tuple, list ): + return Object_type( row ) + else: + return Object_type( *row ) + + def select_many( self, Object_type, sql_command ): + """ + Execute the given sql_command and return its results in the form of a list of objects of + Object_type. 
+ + @type Object_type: type + @param Object_type: class of the object to load + @type sql_command: unicode + @param sql_command: SQL command to execute + @rtype: list of Object_type + @return: loaded objects + """ + connection = self.__get_connection() + cursor = connection.cursor() + + cursor.execute( sql_command ) + + objects = [] + row = cursor.fetchone() + + while row: + if Object_type in ( tuple, list ): + obj = Object_type( row ) + else: + obj = Object_type( *row ) + + objects.append( obj ) + row = cursor.fetchone() + + return objects + + def execute( self, sql_command, commit = True ): + """ + Execute the given sql_command. + + @type sql_command: unicode + @param sql_command: SQL command to execute + @type commit: bool + @param commit: True to automatically commit after the command + """ + connection = self.__get_connection() + cursor = connection.cursor() + + cursor.execute( sql_command ) + + if commit: + connection.commit() @staticmethod def generate_id(): @@ -231,44 +170,45 @@ class Database( object ): return "".join( digits ) - @async - def next_id( self, callback ): + def next_id( self, Object_type, commit = True ): """ - Generate the next available object id, and yield the provided callback generator with the - object id as its argument. + Generate the next available object id and return it. 
- @type callback: generator - @param callback: generator to send the next available object id to + @type Object_type: type + @param Object_type: class of the object that the id is for + @type commit: bool + @param commit: True to automatically commit after storing the next id """ + connection = self.__get_connection() + cursor = connection.cursor() + # generate a random id, but on the off-chance that it collides with something else already in # the database, try again next_id = Database.generate_id() - while self.__db.get( next_id, default = None ) is not None: + cursor.execute( Object_type.sql_id_exists( next_id ) ) + + while cursor.fetchone() is not None: next_id = Database.generate_id() + cursor.execute( Object_type.sql_id_exists( next_id ) ) - # save the next_id as a key in the database so that it's not handed out again to another client - self.__db[ next_id ] = "" + # save a new object with the next_id to the database + obj = Object_type( next_id ) + cursor.execute( obj.sql_create() ) - yield callback, next_id + if commit: + connection.commit() + + return next_id - @async def close( self ): """ Shutdown the database. """ - self.__db.close() - self.__env.close() - yield None + if self.__connection: + self.__connection.close() - @async - def clear_cache( self ): - """ - Clear the memory object cache. - """ - self.__cache.clear() - yield None - - scheduler = property( lambda self: self.__scheduler ) + if self.__pool: + self.__pool.closeall() class Valid_id( object ): @@ -289,9 +229,9 @@ class Valid_id( object ): class Valid_revision( object ): """ - Validator for an object id. + Validator for an object revision timestamp. 
""" - REVISION_PATTERN = re.compile( "^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d+$" ) + REVISION_PATTERN = re.compile( "^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d+[+-]\d\d(:)?\d\d$" ) def __init__( self, none_okay = False ): self.__none_okay = none_okay diff --git a/controller/Expose.py b/controller/Expose.py index 067cebf..7b83051 100644 --- a/controller/Expose.py +++ b/controller/Expose.py @@ -1,18 +1,10 @@ import cherrypy -from Validate import Validation_error - # module-level variable that, when set to a view, overrides the view for all exposed methods. used # by unit tests view_override = None -class Expose_error( Exception ): - def __init__( self, message ): - Exception.__init__( self, message ) - self.__message = message - - def expose( view = None, rss = None ): """ expose() can be used to tag a method as available for publishing to the web via CherryPy. In @@ -57,8 +49,16 @@ def expose( view = None, rss = None ): # try executing the exposed function try: result = function( *args, **kwargs ) - except Validation_error, error: - result = dict( name = error.name, value = error.value, error = error.message ) + except cherrypy.NotFound: + raise + except Exception, error: + if hasattr( error, "to_dict" ): + result = error.to_dict() + else: + # TODO: it'd be nice to send an email to myself with the traceback + import traceback + traceback.print_exc() + result = dict( error = u"An error occurred when processing your request. Please try again or contact support." 
) redirect = result.get( u"redirect", None ) @@ -74,7 +74,7 @@ def expose( view = None, rss = None ): return unicode( view_override( **result ) ) except: if redirect is None: - raise Expose_error( result.get( u"error" ) or result ) + raise # if that doesn't work, and there's a redirect, then redirect del( result[ u"redirect" ] ) diff --git a/controller/Notebooks.py b/controller/Notebooks.py index a825e8a..40cdaf9 100644 --- a/controller/Notebooks.py +++ b/controller/Notebooks.py @@ -1,15 +1,13 @@ import cherrypy -from Scheduler import Scheduler +from datetime import datetime from Expose import expose from Validate import validate, Valid_string, Validation_error, Valid_bool from Database import Valid_id, Valid_revision from Users import grab_user_id -from Updater import wait_for_update, update_client from Expire import strongly_expire from Html_nuker import Html_nuker -from Async import async -from model.Notebook import Notebook -from model.Note import Note +from new_model.Notebook import Notebook +from new_model.Note import Note from view.Main_page import Main_page from view.Json import Json from view.Html_file import Html_file @@ -18,7 +16,7 @@ from view.Html_file import Html_file class Access_error( Exception ): def __init__( self, message = None ): if message is None: - message = u"You don't have access to this notebook." + message = u"Sorry, you don't have access to do that." Exception.__init__( self, message ) self.__message = message @@ -33,12 +31,10 @@ class Notebooks( object ): """ Controller for dealing with notebooks and their notes, corresponding to the "/notebooks" URL. """ - def __init__( self, scheduler, database, users ): + def __init__( self, database, users ): """ Create a new Notebooks object. 
- @type scheduler: controller.Scheduler - @param scheduler: scheduler to use for asynchronous calls @type database: controller.Database @param database: database that notebooks are stored in @type users: controller.Users @@ -46,7 +42,6 @@ class Notebooks( object ): @rtype: Notebooks @return: newly constructed Notebooks """ - self.__scheduler = scheduler self.__database = database self.__users = users @@ -83,14 +78,11 @@ class Notebooks( object ): @expose( view = Json ) @strongly_expire - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), note_id = Valid_id( none_okay = True ), - revision = Valid_string( min = 0, max = 30 ), + revision = Valid_revision( none_okay = True ), user_id = Valid_id( none_okay = True ), ) def contents( self, notebook_id, note_id = None, revision = None, user_id = None ): @@ -108,39 +100,37 @@ class Notebooks( object ): @param user_id: id of current logged-in user (if any), determined by @grab_user_id @rtype: json dict @return: { 'notebook': notebookdict, 'note': notedict or None } - @raise Access_error: the current user doesn't have access to the given notebook + @raise Access_error: the current user doesn't have access to the given notebook or note @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) + + if not self.__users.check_access( user_id, notebook_id, read_write = True ): + notebook.read_write = False if notebook is None: note = None elif note_id == u"blank": - note = Note( note_id ) + note = Note.create( note_id ) else: - note = notebook.lookup_note( note_id ) + note = self.__database.load( Note, note_id, revision ) + if note and 
note.notebook_id != notebook_id: + raise Access_error() - if revision: - self.__database.load( note_id, self.__scheduler.thread, revision ) - note = ( yield Scheduler.SLEEP ) + startup_notes = self.__database.select_many( Note, notebook.sql_load_startup_notes() ) - yield dict( + return dict( notebook = notebook, - startup_notes = notebook.startup_notes, + startup_notes = startup_notes, note = note, ) @expose( view = Json ) @strongly_expire - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), note_id = Valid_id(), @@ -161,35 +151,24 @@ class Notebooks( object ): @param user_id: id of current logged-in user (if any), determined by @grab_user_id @rtype: json dict @return: { 'note': notedict or None } - @raise Access_error: the current user doesn't have access to the given notebook + @raise Access_error: the current user doesn't have access to the given notebook or note @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + note = self.__database.load( Note, note_id, revision ) - if notebook is None: - note = None - else: - note = notebook.lookup_note( note_id ) + if note and note.notebook_id != notebook_id: + raise Access_error() - if revision: - self.__database.load( note_id, self.__scheduler.thread, revision ) - note = ( yield Scheduler.SLEEP ) - - yield dict( + return dict( note = note, ) @expose( view = Json ) @strongly_expire - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), note_title = Valid_string( min = 1, max = 500 ), @@ -210,28 +189,23 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the 
arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if notebook is None: note = None else: - note = notebook.lookup_note_by_title( note_title ) + note = self.__database.select_one( Notebook, notebook.sql_load_note_by_title( note_title ) ) - yield dict( + return dict( note = note, ) @expose( view = Json ) @strongly_expire - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), note_title = Valid_string( min = 1, max = 500 ), @@ -252,27 +226,61 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if notebook is None: note = None else: - note = notebook.lookup_note_by_title( note_title ) + note = self.__database.select_one( Notebook, notebook.sql_load_note_by_title( note_title ) ) - yield dict( + return dict( note_id = note and note.object_id or None, ) @expose( view = Json ) - @wait_for_update + @strongly_expire + @grab_user_id + @validate( + notebook_id = Valid_id(), + note_id = Valid_id(), + user_id = Valid_id( none_okay = True ), + ) + def load_note_revisions( self, notebook_id, note_id, user_id = None ): + """ + Return the full list of revision timestamps for this note in chronological order. 
+ + @type notebook_id: unicode + @param notebook_id: id of notebook the note is in + @type note_id: unicode + @param note_id: id of note in question + @type user_id: unicode or NoneType + @param user_id: id of current logged-in user (if any), determined by @grab_user_id + @rtype: json dict + @return: { 'revisions': revisionslist or None } + @raise Access_error: the current user doesn't have access to the given notebook or note + @raise Validation_error: one of the arguments is invalid + """ + if not self.__users.check_access( user_id, notebook_id ): + raise Access_error() + + note = self.__database.load( Note, note_id ) + + if note: + if note.notebook_id != notebook_id: + raise Access_error() + revisions = self.__database.select_many( unicode, note.sql_load_revisions() ) + else: + revisions = None + + return dict( + revisions = revisions, + ) + + @expose( view = Json ) @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), note_id = Valid_id(), @@ -310,186 +318,78 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id, read_write = True ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if not notebook: raise Access_error() - self.__database.load( note_id, self.__scheduler.thread ) - note = ( yield Scheduler.SLEEP ) + note = self.__database.load( Note, note_id ) # check whether the provided note contents have been changed since the previous revision - def update_note( current_notebook, old_note ): + def update_note( current_notebook, old_note, startup ): # the note hasn't been changed, so bail without updating it - if contents == 
old_note.contents: + if contents == old_note.contents and startup == old_note.startup: new_revision = None # the note has changed, so update it else: - notebook.update_note( note, contents ) + note.contents = contents + note.startup = startup + if startup: + if note.rank is None: + note.rank = self.__database.select_one( float, notebook.sql_highest_rank() ) + 1 + else: + note.rank = None + new_revision = note.revision return new_revision # if the note is already in the given notebook, load it and update it - if note and note in notebook.notes: - self.__database.load( note_id, self.__scheduler.thread, previous_revision ) - old_note = ( yield Scheduler.SLEEP ) + if note and note.notebook_id == notebook.object_id: + old_note = self.__database.load( Note, note_id, previous_revision ) previous_revision = note.revision - new_revision = update_note( notebook, old_note ) + new_revision = update_note( notebook, old_note, startup ) # the note is not already in the given notebook, so look for it in the trash - elif note and notebook.trash and note in notebook.trash.notes: - self.__database.load( note_id, self.__scheduler.thread, previous_revision ) - old_note = ( yield Scheduler.SLEEP ) + elif note and notebook.trash_id and note.notebook_id == notebook.trash_id: + old_note = self.__database.load( Note, note_id, previous_revision ) # undelete the note, putting it back in the given notebook previous_revision = note.revision - notebook.trash.remove_note( note ) - note.deleted_from = None - notebook.add_note( note ) - - new_revision = update_note( notebook, old_note ) + note.notebook_id = notebook.object_id + note.deleted_from_id = None + new_revision = update_note( notebook, old_note, startup ) # otherwise, create a new note else: + if startup: + rank = self.__database.select_one( float, notebook.sql_highest_rank() ) + 1 + else: + rank = None + previous_revision = None - note = Note( note_id, contents ) - notebook.add_note( note ) + note = Note.create( note_id, contents, 
notebook_id = notebook.object_id, startup = startup, rank = rank ) new_revision = note.revision - if startup: - startup_changed = notebook.add_startup_note( note ) - else: - startup_changed = notebook.remove_startup_note( note ) - - if new_revision or startup_changed: - self.__database.save( notebook, self.__scheduler.thread ) - yield Scheduler.SLEEP - self.__users.update_storage( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - self.__database.save( user ) + if new_revision: + self.__database.save( note, commit = False ) + user = self.__users.update_storage( user_id, commit = False ) + self.__database.commit() else: user = None - yield dict( + return dict( new_revision = new_revision, previous_revision = previous_revision, storage_bytes = user and user.storage_bytes or 0, ) @expose( view = Json ) - @wait_for_update @grab_user_id - @async - @update_client - @validate( - notebook_id = Valid_id(), - note_id = Valid_id(), - user_id = Valid_id( none_okay = True ), - ) - def add_startup_note( self, notebook_id, note_id, user_id ): - """ - Designate a particular note to be shown upon startup, e.g. whenever its notebook is displayed. - The given note must already be within this notebook. 
- - @type notebook_id: unicode - @param notebook_id: id of notebook the note is in - @type note_id: unicode - @param note_id: id of note to show on startup - @type user_id: unicode or NoneType - @param user_id: id of current logged-in user (if any), determined by @grab_user_id - @rtype: json dict - @return: { 'storage_bytes': current storage usage by user } - @raise Access_error: the current user doesn't have access to the given notebook - @raise Validation_error: one of the arguments is invalid - """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): - raise Access_error() - - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) - - if not notebook: - raise Access_error() - - self.__database.load( note_id, self.__scheduler.thread ) - note = ( yield Scheduler.SLEEP ) - - if note: - notebook.add_startup_note( note ) - self.__database.save( notebook, self.__scheduler.thread ) - yield Scheduler.SLEEP - self.__users.update_storage( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - self.__database.save( user ) - - yield dict( storage_bytes = user.storage_bytes ) - else: - yield dict( storage_bytes = 0 ) - - @expose( view = Json ) - @wait_for_update - @grab_user_id - @async - @update_client - @validate( - notebook_id = Valid_id(), - note_id = Valid_id(), - user_id = Valid_id( none_okay = True ), - ) - def remove_startup_note( self, notebook_id, note_id, user_id ): - """ - Prevent a particular note from being shown on startup, e.g. whenever its notebook is displayed. - The given note must already be within this notebook. 
- - @type notebook_id: unicode - @param notebook_id: id of notebook the note is in - @type note_id: unicode - @param note_id: id of note to no longer show on startup - @type user_id: unicode or NoneType - @param user_id: id of current logged-in user (if any), determined by @grab_user_id - @rtype: json dict - @return: { 'storage_bytes': current storage usage by user } - @raise Access_error: the current user doesn't have access to the given notebook - @raise Validation_error: one of the arguments is invalid - """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): - raise Access_error() - - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) - - if not notebook: - raise Access_error() - - self.__database.load( note_id, self.__scheduler.thread ) - note = ( yield Scheduler.SLEEP ) - - if note: - notebook.remove_startup_note( note ) - self.__database.save( notebook, self.__scheduler.thread ) - yield Scheduler.SLEEP - self.__users.update_storage( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - self.__database.save( user ) - - yield dict( storage_bytes = user.storage_bytes ) - else: - yield dict( storage_bytes = 0 ) - - @expose( view = Json ) - @wait_for_update - @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), note_id = Valid_id(), @@ -512,42 +412,34 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id, read_write = True ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if not notebook: raise Access_error() - 
self.__database.load( note_id, self.__scheduler.thread ) - note = ( yield Scheduler.SLEEP ) + note = self.__database.load( Note, note_id ) - if note: - notebook.remove_note( note ) + if note and note.notebook_id == notebook_id: + if notebook.trash_id: + note.deleted_from_id = notebook_id + note.notebook_id = notebook.trash_id + note.startup = True + else: + note.notebook_id = None - if notebook.trash: - note.deleted_from = notebook.object_id - notebook.trash.add_note( note ) - notebook.trash.add_startup_note( note ) + self.__database.save( note, commit = False ) + user = self.__users.update_storage( user_id, commit = False ) + self.__database.commit() - self.__database.save( notebook, self.__scheduler.thread ) - yield Scheduler.SLEEP - self.__users.update_storage( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - self.__database.save( user ) - - yield dict( storage_bytes = user.storage_bytes ) + return dict( storage_bytes = user.storage_bytes ) else: - yield dict( storage_bytes = 0 ) + return dict( storage_bytes = 0 ) @expose( view = Json ) - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), note_id = Valid_id(), @@ -569,50 +461,39 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id, read_write = True ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if not notebook: raise Access_error() - self.__database.load( note_id, self.__scheduler.thread ) - note = ( yield Scheduler.SLEEP ) + note = self.__database.load( Note, note_id ) - if note and notebook.trash: + if note and notebook.trash_id: # if 
the note isn't deleted, and it's already in this notebook, just return - if note.deleted_from is None and notebook.lookup_note( note.object_id ): - yield dict( storage_bytes = 0 ) - return + if note.deleted_from_id is None and note.notebook_id == notebook_id: + return dict( storage_bytes = 0 ) # if the note was deleted from a different notebook than the notebook given, raise - if note.deleted_from != notebook_id: + if note.deleted_from_id != notebook_id: raise Access_error() - notebook.trash.remove_note( note ) + note.notebook_id = note.deleted_from_id + note.deleted_from_id = None + note.startup = True - note.deleted_from = None - notebook.add_note( note ) - notebook.add_startup_note( note ) + self.__database.save( note, commit = False ) + user = self.__users.update_storage( user_id, commit = False ) + self.__database.commit() - self.__database.save( notebook, self.__scheduler.thread ) - yield Scheduler.SLEEP - self.__users.update_storage( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - self.__database.save( user ) - - yield dict( storage_bytes = user.storage_bytes ) + return dict( storage_bytes = user.storage_bytes ) else: - yield dict( storage_bytes = 0 ) + return dict( storage_bytes = 0 ) @expose( view = Json ) - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), user_id = Valid_id( none_okay = True ), @@ -632,40 +513,35 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id, read_write = True ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if not notebook: raise Access_error() - for note 
in notebook.notes: - notebook.remove_note( note ) + notes = self.__database.select_many( Note, notebook.sql_load_notes() ) - if notebook.trash: - note.deleted_from = notebook.object_id - notebook.trash.add_note( note ) - notebook.trash.add_startup_note( note ) + for note in notes: + if notebook.trash_id: + note.deleted_from_id = notebook_id + note.notebook_id = notebook.trash_id + note.startup = True + else: + note.notebook_id = None + self.__database.save( note, commit = False ) - self.__database.save( notebook, self.__scheduler.thread ) - yield Scheduler.SLEEP - self.__users.update_storage( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - self.__database.save( user ) + user = self.__users.update_storage( user_id, commit = False ) + self.__database.commit() - yield dict( + return dict( storage_bytes = user.storage_bytes, ) @expose( view = Json ) @strongly_expire - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), search_text = Valid_string( min = 0, max = 100 ), @@ -688,41 +564,39 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if not notebook: raise Access_error() search_text = search_text.lower() + if len( search_text ) == 0: + return dict( notes = [] ) + title_matches = [] content_matches = [] nuker = Html_nuker() - if len( search_text ) > 0: - for note in notebook.notes: - if note is None: continue - if search_text in nuker.nuke( note.title ).lower(): - title_matches.append( note ) - elif search_text in nuker.nuke( note.contents 
).lower(): - content_matches.append( note ) + notes = self.__database.select_many( Note, notebook.sql_search_notes( search_text ) ) - notes = title_matches + content_matches + # further narrow the search results by making sure notes still match after all HTML tags are + # stripped out + for note in notes: + if search_text in nuker.nuke( note.title ).lower(): + title_matches.append( note ) + elif search_text in nuker.nuke( note.contents ).lower(): + content_matches.append( note ) - yield dict( - notes = notes, + return dict( + notes = title_matches + content_matches, ) @expose( view = Json ) @strongly_expire - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), user_id = Valid_id( none_okay = True ), @@ -740,29 +614,23 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if not notebook: raise Access_error() - notes = [ note for note in notebook.notes if note is not None and note.title is not None ] - notes.sort( lambda a, b: cmp( b.revision, a.revision ) ) + notes = self.__database.select_many( Note, notebook.sql_load_notes() ) - yield dict( + return dict( notes = [ ( note.object_id, note.title ) for note in notes ] ) @expose( view = Html_file ) @strongly_expire - @wait_for_update @grab_user_id - @async - @update_client @validate( notebook_id = Valid_id(), user_id = Valid_id( none_okay = True ), @@ -780,42 +648,18 @@ class Notebooks( object ): @raise Access_error: the current user doesn't have access to the given notebook @raise Validation_error: one of the arguments 
is invalid """ - self.check_access( notebook_id, user_id, self.__scheduler.thread ) - if not ( yield Scheduler.SLEEP ): + if not self.__users.check_access( user_id, notebook_id ): raise Access_error() - self.__database.load( notebook_id, self.__scheduler.thread ) - notebook = ( yield Scheduler.SLEEP ) + notebook = self.__database.load( Notebook, notebook_id ) if not notebook: raise Access_error() - normal_notes = list( set( notebook.notes ) - set( notebook.startup_notes ) ) - normal_notes.sort( lambda a, b: -cmp( a.revision, b.revision ) ) - - yield dict( + startup_notes = self.__database.select_many( Note, notebook.sql_load_startup_notes() ) + other_notes = self.__database.select_many( Note, notebook.sql_load_non_startup_notes() ) + + return dict( notebook_name = notebook.name, - notes = [ note for note in notebook.startup_notes + normal_notes if note is not None ], + notes = startup_notes + other_notes, ) - - @async - def check_access( self, notebook_id, user_id, callback ): - # check if the anonymous user has access to this notebook - self.__database.load( u"User anonymous", self.__scheduler.thread ) - anonymous = ( yield Scheduler.SLEEP ) - - access = False - if anonymous.has_access( notebook_id ): - access = True - - if user_id: - # check if the currently logged in user has access to this notebook - self.__database.load( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - - if user and user.has_access( notebook_id ): - access = True - - yield callback, access - - scheduler = property( lambda self: self.__scheduler ) diff --git a/controller/Old_database.py b/controller/Old_database.py new file mode 100644 index 0000000..e56dd1f --- /dev/null +++ b/controller/Old_database.py @@ -0,0 +1,303 @@ +import re +import bsddb +import random +import cPickle +from cStringIO import StringIO +from copy import copy +from model.Persistent import Persistent +from Async import async + + +class Old_database( object ): + ID_BITS = 128 # number of bits within an 
id + ID_DIGITS = "0123456789abcdefghijklmnopqrstuvwxyz" + + def __init__( self, scheduler, database_path = None ): + """ + Create a new database and return it. + + @type scheduler: Scheduler + @param scheduler: scheduler to use + @type database_path: unicode + @param database_path: path to the database file + @rtype: Old_database + @return: database at the given path + """ + self.__scheduler = scheduler + self.__env = bsddb.db.DBEnv() + self.__env.open( None, bsddb.db.DB_CREATE | bsddb.db.DB_PRIVATE | bsddb.db.DB_INIT_MPOOL ) + self.__db = bsddb.db.DB( self.__env ) + self.__db.open( database_path, "database", bsddb.db.DB_HASH, bsddb.db.DB_CREATE ) + self.__cache = {} + + def __persistent_id( self, obj, skip = None ): + # save the object and return its persistent id + if obj != skip and isinstance( obj, Persistent ): + self.__save( obj ) + return obj.object_id + + # returning None indicates that the object should be pickled normally without using a persistent id + return None + + @async + def save( self, obj, callback = None ): + """ + Save the given object to the database, including any objects that it references. 
+ + @type obj: Persistent + @param obj: object to save + @type callback: generator or NoneType + @param callback: generator to wakeup when the save is complete (optional) + """ + self.__save( obj ) + yield callback + + def __save( self, obj ): + # if this object's current revision is already saved, bail + revision_id = obj.revision_id() + if revision_id in self.__cache: + return + + object_id = unicode( obj.object_id ).encode( "utf8" ) + revision_id = unicode( obj.revision_id() ).encode( "utf8" ) + secondary_id = obj.secondary_id and unicode( obj.full_secondary_id() ).encode( "utf8" ) or None + + # update the cache with this saved object + self.__cache[ object_id ] = obj + self.__cache[ revision_id ] = copy( obj ) + if secondary_id: + self.__cache[ secondary_id ] = obj + + # set the pickler up to save persistent ids for every object except for the obj passed in, which + # will be pickled normally + buffer = StringIO() + pickler = cPickle.Pickler( buffer, protocol = -1 ) + pickler.persistent_id = lambda o: self.__persistent_id( o, skip = obj ) + + # pickle the object and write it to the database under both its id key and its revision id key + pickler.dump( obj ) + pickled = buffer.getvalue() + self.__db.put( object_id, pickled ) + self.__db.put( revision_id, pickled ) + + # write the pickled object id (only) to the database under its secondary id + if secondary_id: + buffer = StringIO() + pickler = cPickle.Pickler( buffer, protocol = -1 ) + pickler.persistent_id = lambda o: self.__persistent_id( o ) + pickler.dump( obj ) + self.__db.put( secondary_id, buffer.getvalue() ) + + self.__db.sync() + + @async + def load( self, object_id, callback, revision = None ): + """ + Load the object corresponding to the given object id from the database, and yield the provided + callback generator with the loaded object as its argument, or None if the object_id is unknown. + If a revision is provided, a specific revision of the object will be loaded. 
+ + @type object_id: unicode + @param object_id: id of the object to load + @type callback: generator + @param callback: generator to send the loaded object to + @type revision: int or NoneType + @param revision: revision of the object to load (optional) + """ + obj = self.__load( object_id, revision ) + yield callback, obj + + def __load( self, object_id, revision = None ): + if revision is not None: + object_id = Persistent.make_revision_id( object_id, revision ) + + object_id = unicode( object_id ).encode( "utf8" ) + + # if the object corresponding to the given id has already been loaded, simply return it without + # loading it again + obj = self.__cache.get( object_id ) + if obj is not None: + return obj + + # grab the object for the given id from the database + buffer = StringIO() + unpickler = cPickle.Unpickler( buffer ) + unpickler.persistent_load = self.__load + + pickled = self.__db.get( object_id ) + if pickled is None or pickled == "": + return None + + buffer.write( pickled ) + buffer.flush() + buffer.seek( 0 ) + + # unpickle the object and update the cache with this saved object + obj = unpickler.load() + if obj is None: + print "error unpickling %s: %s" % ( object_id, pickled ) + return None + self.__cache[ unicode( obj.object_id ).encode( "utf8" ) ] = obj + self.__cache[ unicode( obj.revision_id() ).encode( "utf8" ) ] = copy( obj ) + + return obj + + @async + def reload( self, object_id, callback = None ): + """ + Load and immediately save the object corresponding to the given object id or database key. This + is useful when the object has a __setstate__() method that performs some sort of schema + evolution operation. 
+ + @type object_id: unicode + @param object_id: id or key of the object to reload + @type callback: generator or NoneType + @param callback: generator to wakeup when the save is complete (optional) + """ + self.__reload( object_id ) + yield callback + + def __reload( self, object_id, revision = None ): + object_id = unicode( object_id ).encode( "utf8" ) + + # grab the object for the given id from the database + buffer = StringIO() + unpickler = cPickle.Unpickler( buffer ) + unpickler.persistent_load = self.__load + + pickled = self.__db.get( object_id ) + if pickled is None or pickled == "": + return + + buffer.write( pickled ) + buffer.flush() + buffer.seek( 0 ) + + # unpickle the object. this should trigger __setstate__() if the object has such a method + obj = unpickler.load() + if obj is None: + print "error unpickling %s: %s" % ( object_id, pickled ) + return + self.__cache[ object_id ] = obj + + # set the pickler up to save persistent ids for every object except for the obj passed in, which + # will be pickled normally + buffer = StringIO() + pickler = cPickle.Pickler( buffer, protocol = -1 ) + pickler.persistent_id = lambda o: self.__persistent_id( o, skip = obj ) + + # pickle the object and write it to the database under its id key + pickler.dump( obj ) + pickled = buffer.getvalue() + self.__db.put( object_id, pickled ) + + self.__db.sync() + + def size( self, object_id, revision = None ): + """ + Load the object corresponding to the given object id from the database, and return the size of + its pickled data in bytes. If a revision is provided, a specific revision of the object will be + loaded. 
+ + @type object_id: unicode + @param object_id: id of the object whose size should be returned + @type revision: int or NoneType + @param revision: revision of the object to load (optional) + """ + if revision is not None: + object_id = Persistent.make_revision_id( object_id, revision ) + + object_id = unicode( object_id ).encode( "utf8" ) + + pickled = self.__db.get( object_id ) + if pickled is None or pickled == "": + return None + + return len( pickled ) + + @staticmethod + def generate_id(): + int_id = random.getrandbits( Old_database.ID_BITS ) + + base = len( Old_database.ID_DIGITS ) + digits = [] + + while True: + index = int_id % base + digits.insert( 0, Old_database.ID_DIGITS[ index ] ) + int_id = int_id / base + if int_id == 0: + break + + return "".join( digits ) + + @async + def next_id( self, callback ): + """ + Generate the next available object id, and yield the provided callback generator with the + object id as its argument. + + @type callback: generator + @param callback: generator to send the next available object id to + """ + # generate a random id, but on the off-chance that it collides with something else already in + # the database, try again + next_id = Old_database.generate_id() + while self.__db.get( next_id, default = None ) is not None: + next_id = Old_database.generate_id() + + # save the next_id as a key in the database so that it's not handed out again to another client + self.__db[ next_id ] = "" + + yield callback, next_id + + @async + def close( self ): + """ + Shutdown the database. + """ + self.__db.close() + self.__env.close() + yield None + + @async + def clear_cache( self ): + """ + Clear the memory object cache. + """ + self.__cache.clear() + yield None + + scheduler = property( lambda self: self.__scheduler ) + + +class Valid_id( object ): + """ + Validator for an object id. 
+ """ + ID_PATTERN = re.compile( "^[%s]+$" % Old_database.ID_DIGITS ) + + def __init__( self, none_okay = False ): + self.__none_okay = none_okay + + def __call__( self, value ): + if self.__none_okay and value in ( None, "None", "" ): return None + if self.ID_PATTERN.search( value ): return str( value ) + + raise ValueError() + + +class Valid_revision( object ): + """ + Validator for an object id. + """ + REVISION_PATTERN = re.compile( "^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d+$" ) + + def __init__( self, none_okay = False ): + self.__none_okay = none_okay + + def __call__( self, value ): + if self.__none_okay and value in ( None, "None", "" ): return None + if self.REVISION_PATTERN.search( value ): return str( value ) + + raise ValueError() diff --git a/controller/Root.py b/controller/Root.py index 87b21f5..a3f0b5f 100644 --- a/controller/Root.py +++ b/controller/Root.py @@ -1,13 +1,11 @@ import cherrypy -from Scheduler import Scheduler from Expose import expose from Validate import validate -from Async import async from Notebooks import Notebooks from Users import Users -from Updater import update_client, wait_for_update from Database import Valid_id +from new_model.Note import Note from view.Main_page import Main_page from view.Json import Json from view.Error_page import Error_page @@ -18,12 +16,10 @@ class Root( object ): """ The root of the controller hierarchy, corresponding to the "/" URL. """ - def __init__( self, scheduler, database, settings ): + def __init__( self, database, settings ): """ Create a new Root object with the given settings. 
- @type scheduler: controller.Scheduler - @param scheduler: scheduler to use for asynchronous calls @type database: controller.Database @param database: database to use for all controllers @type settings: dict @@ -31,18 +27,16 @@ class Root( object ): @rtype: Root @return: newly constructed Root """ - self.__scheduler = scheduler self.__database = database self.__settings = settings self.__users = Users( - scheduler, database, settings[ u"global" ].get( u"luminotes.http_url", u"" ), settings[ u"global" ].get( u"luminotes.https_url", u"" ), settings[ u"global" ].get( u"luminotes.support_email", u"" ), settings[ u"global" ].get( u"luminotes.rate_plans", [] ), ) - self.__notebooks = Notebooks( scheduler, database, self.__users ) + self.__notebooks = Notebooks( database, self.__users ) @expose() def default( self, password_reset_id ): @@ -72,22 +66,19 @@ class Root( object ): return dict() + # TODO: move this method to controller.Notebooks, and maybe give it a more sensible name @expose( view = Json ) - @wait_for_update - @async - @update_client def next_id( self ): """ - Return the next available database object id. This id is guaranteed to be unique to the - database. + Return the next available database object id for a new note. This id is guaranteed to be unique + among all existing notes. @rtype: json dict @return: { 'next_id': nextid } """ - self.__database.next_id( self.__scheduler.thread ) - next_id = ( yield Scheduler.SLEEP ) + next_id = self.__database.next_id( Note ) - yield dict( + return dict( next_id = next_id, ) @@ -95,28 +86,20 @@ class Root( object ): """ CherryPy HTTP error handler, used to display page not found and generic error pages. 
""" + support_email = self.__settings[ u"global" ].get( u"luminotes.support_email" ) + if status == 404: cherrypy.response.headerMap[ u"Status" ] = u"404 Not Found" cherrypy.response.status = status - cherrypy.response.body = [ unicode( Not_found_page( self.__settings[ u"global" ].get( u"luminotes.support_email" ) ) ) ] + cherrypy.response.body = [ unicode( Not_found_page( support_email ) ) ] return - import sys + # TODO: it'd be nice to send an email to myself with the traceback import traceback traceback.print_exc() - exc_info = sys.exc_info() - if exc_info: - message = exc_info[ 1 ].message - else: - message = None + cherrypy.response.body = [ unicode( Error_page( support_email ) ) ] - cherrypy.response.body = [ unicode( Error_page( - self.__settings[ u"global" ].get( u"luminotes.support_email" ), - message, - ) ) ] - - scheduler = property( lambda self: self.__scheduler ) database = property( lambda self: self.__database ) notebooks = property( lambda self: self.__notebooks ) users = property( lambda self: self.__users ) diff --git a/controller/Updater.py b/controller/Updater.py deleted file mode 100644 index 2a6927c..0000000 --- a/controller/Updater.py +++ /dev/null @@ -1,72 +0,0 @@ -from Queue import Queue, Empty - - -TIMEOUT_SECONDS = 10.0 - - -def wait_for_update( function ): - """ - A decorator that passes a "queue" keyword arugment to its decorated function, calls the function, - and then blocks until an asynchronous response comes back via the Queue. When a response is - received, wait_for_update() returns it. - - For this decorator to be useful, you should use it to decorate a function that fires off some - asynchronous action and then returns immediately. A typical way to accomplish this is by using - the @async decorator after the @wait_for_update decorator. 
- """ - def get_message( *args, **kwargs ): - queue = Queue() - - kwargs[ "queue" ] = queue - function( *args, **kwargs ) - - # wait until a response is available in the queue, and then return that response - try: - return queue.get( block = True, timeout = TIMEOUT_SECONDS ) - except Empty: - return { "error": u"A timeout occurred when processing your request. Please try again or contact support." } - - return get_message - - -def update_client( function ): - """ - A decorator used to wrap a generator function so that its yielded values can be issued as - updates to the client. For this to work, the generator function must be invoked with a keyword - argument "queue" containing a Queue where the result can be put(). - - Also supports catching Validation_error exceptions and sending appropriate errors to the client. - - Note that this decorator itself is a generator function and works by passing along next()/send() - calls to its decorated generator. Only yielded values that are dictionaries are sent to the - client via the provided queue. All other types of yielded values are in turn yielded by this - decorator itself. - """ - def put_message( *args, **kwargs ): - # look in the called function's kwargs for the queue where results should be sent - queue = kwargs.pop( "queue" ) - - try: - generator = function( *args, **kwargs ) - message = None - - while True: - result = generator.send( message ) - - if isinstance( result, dict ): - queue.put( result ) - message = ( yield None ) - else: - message = ( yield result ) - except StopIteration: - return - except Exception, error: - # TODO: might be better to use view.Json instead of calling to_dict() manually - if hasattr( error, "to_dict" ): - result = error.to_dict() - queue.put( result ) - else: - queue.put( { "error": u"An error occurred when processing your request. Please try again or contact support." 
} ) - raise - - return put_message diff --git a/controller/Users.py b/controller/Users.py index 4654575..c440542 100644 --- a/controller/Users.py +++ b/controller/Users.py @@ -1,17 +1,15 @@ import re import cherrypy +from pytz import utc from datetime import datetime, timedelta -from model.User import User -from model.Notebook import Notebook -from model.Note import Note -from model.Password_reset import Password_reset -from Scheduler import Scheduler +from new_model.User import User +from new_model.Notebook import Notebook +from new_model.Note import Note +from new_model.Password_reset import Password_reset from Expose import expose from Validate import validate, Valid_string, Valid_bool, Validation_error from Database import Valid_id -from Updater import update_client, wait_for_update from Expire import strongly_expire -from Async import async from view.Json import Json from view.Main_page import Main_page from view.Redeem_reset_note import Redeem_reset_note @@ -123,12 +121,10 @@ class Users( object ): """ Controller for dealing with users, corresponding to the "/users" URL. """ - def __init__( self, scheduler, database, http_url, https_url, support_email, rate_plans ): + def __init__( self, database, http_url, https_url, support_email, rate_plans ): """ Create a new Users object. 
- @type scheduler: controller.Scheduler - @param scheduler: scheduler to use for asynchronous calls @type database: controller.Database @param database: database that users are stored in @type http_url: unicode @@ -142,7 +138,6 @@ class Users( object ): @rtype: Users @return: newly constructed Users """ - self.__scheduler = scheduler self.__database = database self.__http_url = http_url self.__https_url = https_url @@ -151,9 +146,6 @@ class Users( object ): @expose( view = Json ) @update_auth - @wait_for_update - @async - @update_client @validate( username = ( Valid_string( min = 1, max = 30 ), valid_username ), password = Valid_string( min = 1, max = 30 ), @@ -184,45 +176,39 @@ class Users( object ): if password != password_repeat: raise Signup_error( u"The passwords you entered do not match. Please try again." ) - self.__database.load( "User %s" % username, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) + user = self.__database.select_one( User, User.sql_load_by_username( username ) ) if user is not None: raise Signup_error( u"Sorry, that username is not available. Please try something else." 
) # create a notebook for this user, along with a trash for that notebook - self.__database.next_id( self.__scheduler.thread ) - trash_id = ( yield Scheduler.SLEEP ) - trash = Notebook( trash_id, u"trash" ) + trash_id = self.__database.next_id( Notebook, commit = False ) + trash = Notebook.create( trash_id, u"trash" ) + self.__database.save( trash, commit = False ) - self.__database.next_id( self.__scheduler.thread ) - notebook_id = ( yield Scheduler.SLEEP ) - notebook = Notebook( notebook_id, u"my notebook", trash ) + notebook_id = self.__database.next_id( Notebook, commit = False ) + notebook = Notebook.create( notebook_id, u"my notebook", trash_id ) + self.__database.save( notebook, commit = False ) # create a startup note for this user's notebook - self.__database.next_id( self.__scheduler.thread ) - note_id = ( yield Scheduler.SLEEP ) - note = Note( note_id, file( u"static/html/welcome to your wiki.html" ).read() ) - notebook.add_note( note ) - notebook.add_startup_note( note ) + note_id = self.__database.next_id( Note, commit = False ) + note_contents = file( u"static/html/welcome to your wiki.html" ).read() + note = Note.create( note_id, note_contents, notebook_id, startup = True, rank = 0 ) + self.__database.save( note, commit = False ) # actually create the new user - self.__database.next_id( self.__scheduler.thread ) - user_id = ( yield Scheduler.SLEEP ) + user_id = self.__database.next_id( User, commit = False ) + user = User.create( user_id, username, password, email_address ) + self.__database.save( user, commit = False ) - user = User( user_id, username, password, email_address, notebooks = [ notebook ] ) - self.__database.save( user ) - - # add the new user to the user list - self.__database.load( u"User_list all", self.scheduler.thread ) - user_list = ( yield Scheduler.SLEEP ) - if user_list: - user_list.add_user( user ) - self.__database.save( user_list ) + # record the fact that the new user has access to their new notebook + 
self.__database.execute( user.sql_save_notebook( notebook_id, read_write = True ), commit = False ) + self.__database.execute( user.sql_save_notebook( trash_id, read_write = True ), commit = False ) + self.__database.commit() redirect = u"/notebooks/%s" % notebook.object_id - yield dict( + return dict( redirect = redirect, authenticated = user, ) @@ -230,9 +216,6 @@ class Users( object ): @expose() @grab_user_id @update_auth - @wait_for_update - @async - @update_client def demo( self, user_id = None ): """ Create a new guest User for purposes of the demo. Start that user with their own Notebook and @@ -250,54 +233,51 @@ class Users( object ): # if the user is already logged in as a guest, then just redirect to their existing demo # notebook if user_id: - self.__database.load( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) - if user.username is None and len( user.notebooks ) > 0: - redirect = u"/notebooks/%s" % user.notebooks[ 0 ].object_id - yield dict( redirect = redirect ) - return + user = self.__database.load( User, user_id ) + first_notebook = self.__database.select_one( Notebook, user.sql_load_notebooks( parents_only = True ) ) + if user.username is None and first_notebook: + redirect = u"/notebooks/%s" % first_notebook.object_id + return dict( redirect = redirect ) # create a demo notebook for this user, along with a trash for that notebook - self.__database.next_id( self.__scheduler.thread ) - trash_id = ( yield Scheduler.SLEEP ) - trash = Notebook( trash_id, u"trash" ) + trash_id = self.__database.next_id( Notebook, commit = False ) + trash = Notebook.create( trash_id, u"trash" ) + self.__database.save( trash, commit = False ) - self.__database.next_id( self.__scheduler.thread ) - notebook_id = ( yield Scheduler.SLEEP ) - notebook = Notebook( notebook_id, u"my notebook", trash ) + notebook_id = self.__database.next_id( Notebook, commit = False ) + notebook = Notebook.create( notebook_id, u"my notebook", trash_id ) + 
self.__database.save( notebook, commit = False ) # create startup notes for this user's notebook - self.__database.next_id( self.__scheduler.thread ) - note_id = ( yield Scheduler.SLEEP ) - note = Note( note_id, file( u"static/html/this is a demo.html" ).read() ) - notebook.add_note( note ) - notebook.add_startup_note( note ) + note_id = self.__database.next_id( Note, commit = False ) + note_contents = file( u"static/html/this is a demo.html" ).read() + note = Note.create( note_id, note_contents, notebook_id, startup = True, rank = 0 ) + self.__database.save( note, commit = False ) - self.__database.next_id( self.__scheduler.thread ) - note_id = ( yield Scheduler.SLEEP ) - note = Note( note_id, file( u"static/html/welcome to your wiki.html" ).read() ) - notebook.add_note( note ) - notebook.add_startup_note( note ) + note_id = self.__database.next_id( Note, commit = False ) + note_contents = file( u"static/html/welcome to your wiki.html" ).read() + note = Note.create( note_id, note_contents, notebook_id, startup = True, rank = 1 ) + self.__database.save( note, commit = False ) - # actually create the new user. 
because this is just a demo user, we're not adding it to the User_list - self.__database.next_id( self.__scheduler.thread ) - user_id = ( yield Scheduler.SLEEP ) + # actually create the new user + user_id = self.__database.next_id( User, commit = False ) + user = User.create( user_id, username = None, password = None, email_address = None ) + self.__database.save( user, commit = False ) - user = User( user_id, username = None, password = None, email_address = None, notebooks = [ notebook ] ) - self.__database.save( user ) + # record the fact that the new user has access to their new notebook + self.__database.execute( user.sql_save_notebook( notebook_id, read_write = True ), commit = False ) + self.__database.execute( user.sql_save_notebook( trash_id, read_write = True ), commit = False ) + self.__database.commit() redirect = u"/notebooks/%s" % notebook.object_id - yield dict( + return dict( redirect = redirect, authenticated = user, ) @expose( view = Json ) @update_auth - @wait_for_update - @async - @update_client @validate( username = ( Valid_string( min = 1, max = 30 ), valid_username ), password = Valid_string( min = 1, max = 30 ), @@ -317,28 +297,26 @@ class Users( object ): @raise Authentication_error: invalid username or password @raise Validation_error: one of the arguments is invalid """ - self.__database.load( "User %s" % username, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) + user = self.__database.select_one( User, User.sql_load_by_username( username ) ) if user is None or user.check_password( password ) is False: raise Authentication_error( u"Invalid username or password." 
) + first_notebook = self.__database.select_one( Notebook, user.sql_load_notebooks( parents_only = True ) ) + # redirect to the user's first notebook (if any) - if len( user.notebooks ) > 0: - redirect = u"/notebooks/%s" % user.notebooks[ 0 ].object_id + if first_notebook: + redirect = u"/notebooks/%s" % first_notebook.object_id else: redirect = u"/" - yield dict( + return dict( redirect = redirect, authenticated = user, ) @expose( view = Json ) @update_auth - @wait_for_update - @async - @update_client def logout( self ): """ Deauthenticate the user and log them out of their current session. @@ -346,7 +324,7 @@ class Users( object ): @rtype: json dict @return: { 'redirect': url, 'deauthenticated': True } """ - yield dict( + return dict( redirect = self.__http_url + u"/", deauthenticated = True, ) @@ -354,9 +332,6 @@ class Users( object ): @expose( view = Json ) @strongly_expire @grab_user_id - @wait_for_update - @async - @update_client @validate( include_startup_notes = Valid_bool(), user_id = Valid_id( none_okay = True ), @@ -382,38 +357,42 @@ class Users( object ): @raise Validation_error: one of the arguments is invalid """ # if there's no logged-in user, default to the anonymous user - self.__database.load( user_id or u"User anonymous", self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) + anonymous = self.__database.select_one( User, User.sql_load_by_username( u"anonymous" ) ) + if user_id: + user = self.__database.load( User, user_id ) + else: + user = anonymous - if not user: - yield dict( + if not user or not anonymous: + return dict( user = None, notebooks = None, http_url = u"", ) - return # in addition to this user's own notebooks, add to that list the anonymous user's notebooks - self.__database.load( u"User anonymous", self.__scheduler.thread ) - anonymous = ( yield Scheduler.SLEEP ) login_url = None + notebooks = self.__database.select_many( Notebook, anonymous.sql_load_notebooks() ) if user_id: - notebooks = anonymous.notebooks + notebooks 
+= self.__database.select_many( Notebook, user.sql_load_notebooks() ) + # if the user is not logged in, return a login URL else: - notebooks = [] - if len( anonymous.notebooks ) > 0: - anon_notebook = anonymous.notebooks[ 0 ] - login_note = anon_notebook.lookup_note_by_title( u"login" ) + if len( notebooks ) > 0: + main_notebook = notebooks[ 0 ] + login_note = self.__database.select_one( Note, main_notebook.sql_load_note_by_title( u"login" ) ) if login_note: - login_url = "%s/notebooks/%s?note_id=%s" % ( self.__https_url, anon_notebook.object_id, login_note.object_id ) + login_url = "%s/notebooks/%s?note_id=%s" % ( self.__https_url, main_notebook.object_id, login_note.object_id ) - notebooks += user.notebooks + if include_startup_notes and len( notebooks ) > 0: + startup_notes = self.__database.select_many( Note, notebooks[ 0 ].sql_load_startup_notes() ) + else: + startup_notes = [] - yield dict( + return dict( user = user, notebooks = notebooks, - startup_notes = include_startup_notes and len( notebooks ) > 0 and notebooks[ 0 ].startup_notes or [], + startup_notes = startup_notes, http_url = self.__http_url, login_url = login_url, rate_plan = ( user.rate_plan < len( self.__rate_plans ) ) and self.__rate_plans[ user.rate_plan ] or {}, @@ -421,54 +400,64 @@ class Users( object ): def calculate_storage( self, user ): """ - Calculate total storage utilization for all notebooks and all notes of the given user, - including storage for all past revisions. + Calculate total storage utilization for all notes of the given user, including storage for all + past revisions. 
+ @type user: User @param user: user for which to calculate storage utilization @rtype: int @return: total bytes used for storage """ - total_bytes = 0 + return sum( self.__database.select_one( tuple, user.sql_calculate_storage() ), 0 ) - def sum_revisions( obj ): - return \ - self.__database.size( obj.object_id ) + \ - sum( [ self.__database.size( obj.object_id, revision ) or 0 for revision in obj.revisions_list ], 0 ) - - def sum_notebook( notebook ): - return \ - self.__database.size( notebook.object_id ) + \ - sum( [ sum_revisions( note ) for note in notebook.notes ], 0 ) - - for notebook in user.notebooks: - total_bytes += sum_notebook( notebook ) - - if notebook.trash: - total_bytes += sum_notebook( notebook.trash ) - - return total_bytes - - @async - def update_storage( self, user_id, callback = None ): + def update_storage( self, user_id, commit = True ): """ Calculate and record total storage utilization for the given user. - @type user_id: unicode or NoneType + + @type user_id: unicode @param user_id: id of user for which to calculate storage utilization - @type callback: generator or NoneType - @param callback: generator to wakeup when the update is complete (optional) + @type commit: bool + @param commit: True to automatically commit after the update + @rtype: model.User + @return: object of the user corresponding to user_id """ - self.__database.load( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) + user = self.__database.load( User, user_id ) if user: user.storage_bytes = self.calculate_storage( user ) + self.__database.save( user, commit ) - yield callback, user + return user + + def check_access( self, user_id, notebook_id, read_write = False ): + """ + Determine whether the given user has access to the given notebook. 
+ + @type user_id: unicode + @param user_id: id of user whose access to check + @type notebook_id: unicode + @param notebook_id: id of notebook to check access for + @type read_write: bool + @param read_write: True if read-write access is being checked, False if read-only access (defaults to False) + @rtype: bool + @return: True if the user has access + """ + # check if the anonymous user has access to this notebook + anonymous = self.__database.select_one( User, User.sql_load_by_username( u"anonymous" ) ) + + if self.__database.select_one( bool, anonymous.sql_has_access( notebook_id, read_write ) ): + return True + + if user_id: + # check if the given user has access to this notebook + user = self.__database.load( User, user_id ) + + if user and self.__database.select_one( bool, user.sql_has_access( notebook_id ) ): + return True + + return False @expose( view = Json ) - @wait_for_update - @async - @update_client @validate( email_address = ( Valid_string( min = 1, max = 60 ), valid_email_address ), send_reset_button = unicode, @@ -491,19 +480,13 @@ class Users( object ): from email import Message # check whether there are actually any users with the given email address - self.__database.load( u"User_list all", self.scheduler.thread ) - user_list = ( yield Scheduler.SLEEP ) + users = self.__database.select_many( User, User.sql_load_by_email_address( email_address ) ) - if not user_list: - raise Password_reset_error( "There was an error when sending your password reset email. Please contact %s." 
% self.__support_email ) - - users = [ user for user in user_list.users if user.email_address == email_address ] if len( users ) == 0: raise Password_reset_error( u"There are no Luminotes users with the email address %s" % email_address ) # record the sending of this reset email - self.__database.next_id( self.__scheduler.thread ) - password_reset_id = ( yield Scheduler.SLEEP ) + password_reset_id = self.__database.next_id( Password_reset, commit = False ) password_reset = Password_reset( password_reset_id, email_address ) self.__database.save( password_reset ) @@ -527,15 +510,12 @@ class Users( object ): server.sendmail( message[ u"from" ], [ email_address ], message.as_string() ) server.quit() - yield dict( + return dict( message = u"Please check your inbox. A password reset email has been sent to %s" % email_address, ) @expose( view = Main_page ) @strongly_expire - @wait_for_update - @async - @update_client @validate( password_reset_id = Valid_id(), ) @@ -550,43 +530,34 @@ class Users( object ): @raise Password_reset_error: an error occured when redeeming the password reset, such as an expired link @raise Validation_error: one of the arguments is invalid """ - self.__database.load( u"User anonymous", self.__scheduler.thread ) - anonymous = ( yield Scheduler.SLEEP ) + anonymous = self.__database.select_one( User, User.sql_load_by_username( u"anonymous" ) ) + if anonymous: + main_notebook = self.__database.select_one( Notebook, anonymous.sql_load_notebooks() ) - if not anonymous or len( anonymous.notebooks ) == 0: + if not anonymous or not main_notebook: raise Password_reset_error( "There was an error when completing your password reset. Please contact %s." 
% self.__support_email ) - self.__database.load( password_reset_id, self.__scheduler.thread ) - password_reset = ( yield Scheduler.SLEEP ) + password_reset = self.__database.load( Password_reset, password_reset_id ) - if not password_reset or datetime.now() - password_reset.revision > timedelta( hours = 25 ): + if not password_reset or datetime.now( tz = utc ) - password_reset.revision > timedelta( hours = 25 ): raise Password_reset_error( "Your password reset link has expired. Please request a new password reset email." ) if password_reset.redeemed: raise Password_reset_error( "Your password has already been reset. Please request a new password reset email." ) - self.__database.load( u"User_list all", self.__scheduler.thread ) - user_list = ( yield Scheduler.SLEEP ) - - if not user_list: - raise Password_reset_error( u"There are no Luminotes users with the email address %s" % password_reset.email_address ) - # find the user(s) with the email address from the password reset request - matching_users = [ user for user in user_list.users if user.email_address == password_reset.email_address ] + matching_users = self.__database.select_many( User, User.sql_load_by_email_address( password_reset.email_address ) ) if len( matching_users ) == 0: raise Password_reset_error( u"There are no Luminotes users with the email address %s" % password_reset.email_address ) - yield dict( - notebook_id = anonymous.notebooks[ 0 ].object_id, + return dict( + notebook_id = main_notebook.object_id, note_id = u"blank", note_contents = unicode( Redeem_reset_note( password_reset_id, matching_users ) ), ) @expose( view = Json ) - @wait_for_update - @async - @update_client def reset_password( self, password_reset_id, reset_button, **new_passwords ): """ Reset all the users with the provided passwords. 
@@ -606,27 +577,19 @@ class Users( object ): except ValueError: raise Validation_error( "password_reset_id", password_reset_id, id_validator, "is not a valid id" ) - self.__database.load( password_reset_id, self.__scheduler.thread ) - password_reset = ( yield Scheduler.SLEEP ) + password_reset = self.__database.load( Password_reset, password_reset_id ) - if not password_reset or datetime.now() - password_reset.revision > timedelta( hours = 25 ): + if not password_reset or datetime.now( tz = utc ) - password_reset.revision > timedelta( hours = 25 ): raise Password_reset_error( "Your password reset link has expired. Please request a new password reset email." ) if password_reset.redeemed: raise Password_reset_error( "Your password has already been reset. Please request a new password reset email." ) - self.__database.load( u"User_list all", self.__scheduler.thread ) - user_list = ( yield Scheduler.SLEEP ) - - if not user_list: - raise Password_reset_error( "There was an error when resetting your password. Please contact %s." % self.__support_email ) - - # find the user(s) with the email address from the password reset request - matching_users = [ user for user in user_list.users if user.email_address == password_reset.email_address ] + matching_users = self.__database.select_many( User, User.sql_load_by_email_address( password_reset.email_address ) ) allowed_user_ids = [ user.object_id for user in matching_users ] # reset any passwords that are non-blank - users_to_reset = [] + at_least_one_reset = False for ( user_id, ( new_password, new_password_repeat ) ) in new_passwords.items(): if user_id not in allowed_user_ids: raise Password_reset_error( "There was an error when resetting your password. Please contact %s." 
% self.__support_email ) @@ -635,8 +598,7 @@ class Users( object ): if new_password == u"" and new_password_repeat == u"": continue - self.__database.load( user_id, self.__scheduler.thread ) - user = ( yield Scheduler.SLEEP ) + user = self.__database.load( User, user_id ) if not user: raise Password_reset_error( "There was an error when resetting your password. Please contact %s." % self.__support_email ) @@ -649,19 +611,16 @@ class Users( object ): if len( new_password ) > 30: raise Password_reset_error( u"Your password can be no longer than 30 characters." ) - users_to_reset.append( ( user, new_password ) ) - - for ( user, new_password ) in users_to_reset: + at_least_one_reset = True user.password = new_password - self.__database.save( user ) + self.__database.save( user, commit = False ) # if all the new passwords provided are blank, bail - if not users_to_reset: + if not at_least_one_reset: raise Password_reset_error( u"Please enter a new password. Or, if you already know your password, just click the login link above." ) password_reset.redeemed = True - self.__database.save( password_reset ) + self.__database.save( password_reset, commit = False ) + self.__database.commit() - yield dict( redirect = u"/" ) - - scheduler = property( lambda self: self.__scheduler ) + return dict( redirect = u"/" ) diff --git a/controller/Validate.py b/controller/Validate.py index 131eb62..5f0963b 100644 --- a/controller/Validate.py +++ b/controller/Validate.py @@ -32,7 +32,7 @@ class Validation_error( Exception ): def to_dict( self ): return dict( - error = u"The %s %s." % ( self.__name, self.__message ), + error = u"The %s %s." 
% ( self.__name.replace( u"_", " " ), self.__message ), name = self.__name, value = self.__value, ) diff --git a/controller/test/Stub_database.py b/controller/test/Stub_database.py new file mode 100644 index 0000000..52b0d30 --- /dev/null +++ b/controller/test/Stub_database.py @@ -0,0 +1,72 @@ +from copy import copy + + +class Stub_database( object ): + def __init__( self, connection = None ): + # map of object id to list of saved objects (presumably in increasing order of revisions) + self.objects = {} + self.user_notebook = {} # map of user_id to ( notebook_id, read_write ) + self.__next_id = 0 + + def save( self, obj, commit = False ): + if obj.object_id in self.objects: + self.objects[ obj.object_id ].append( copy( obj ) ) + else: + self.objects[ obj.object_id ] = [ copy( obj ) ] + + def load( self, Object_type, object_id, revision = None ): + obj_list = self.objects.get( object_id ) + + if not obj_list: + return None + + # if a particular revision wasn't requested, just return the most recently saved object + # matching the given object_id + if revision is None: + if not isinstance( obj_list[ -1 ], Object_type ): + return None + return copy( obj_list[ -1 ] ) + + # a particular revision was requested, so pick it out of the objects matching the given id + matching_objs = [ obj for obj in obj_list if str( obj.revision ) == str( revision ) ] + if len( matching_objs ) > 0: + if not isinstance( matching_objs[ -1 ], Object_type ): + return None + return copy( matching_objs[ -1 ] ) + + return None + + def select_one( self, Object_type, sql_command ): + if callable( sql_command ): + result = sql_command( self ) + if isinstance( result, list ): + if len( result ) == 0: return None + return result[ 0 ] + return result + + raise NotImplementedError( sql_command ) + + def select_many( self, Object_type, sql_command ): + if callable( sql_command ): + result = sql_command( self ) + if isinstance( result, list ): + return result + return [ result ] + + raise 
NotImplementedError( sql_command ) + + def execute( self, sql_command, commit = False ): + if callable( sql_command ): + return sql_command( self ) + + raise NotImplementedError( sql_command ) + + def next_id( self, Object_type, commit = True ): + self.__next_id += 1 + return unicode( self.__next_id ) + + def commit( self ): + pass + + def close( self ): + pass diff --git a/controller/test/Stub_object.py b/controller/test/Stub_object.py new file mode 100644 index 0000000..6daef90 --- /dev/null +++ b/controller/test/Stub_object.py @@ -0,0 +1,79 @@ +from datetime import datetime +from new_model.Persistent import Persistent, quote + + +def notz_quote( value ): + """ + Apparently, pysqlite2 chokes on timestamps that have a timezone when reading them out of the + database, so for purposes of the unit tests, strip off the timezone on all datetime objects. + """ + if isinstance( value, datetime ): + value = value.replace( tzinfo = None ) + + return quote( value ) + + +class Stub_object( Persistent ): + def __init__( self, object_id, revision = None, value = None, value2 = None ): + Persistent.__init__( self, object_id, revision ) + self.__value = value + self.__value2 = value2 + + @staticmethod + def sql_load( object_id, revision = None ): + if revision: + return "select * from stub_object where id = %s and revision = %s;" % ( quote( object_id ), notz_quote( revision ) ) + + return "select * from stub_object where id = %s order by revision desc limit 1;" % quote( object_id ) + + @staticmethod + def sql_id_exists( object_id, revision = None ): + if revision: + return "select id from stub_object where id = %s and revision = %s;" % ( quote( object_id ), notz_quote( revision ) ) + + return "select id from stub_object where id = %s order by revision desc limit 1;" % quote( object_id ) + + def sql_exists( self ): + return Stub_object.sql_id_exists( self.object_id, self.revision ) + + def sql_create( self ): + return \ + "insert into stub_object ( id, revision, value, value2 ) " 
+ \ + "values ( %s, %s, %s, %s );" % \ + ( quote( self.object_id ), notz_quote( self.revision ), quote( self.__value ), + quote( self.__value2 ) ) + + def sql_update( self ): + return self.sql_create() + + @staticmethod + def sql_load_em_all(): + return "select * from stub_object;" + + @staticmethod + def sql_create_table(): + return \ + """ + create table stub_object ( + id text not null, + revision timestamp with time zone not null, + value integer, + value2 integer + ); + """ + + @staticmethod + def sql_tuple(): + return "select 1, 2;" + + def __set_value( self, value ): + self.update_revision() + self.__value = value + + def __set_value2( self, value2 ): + self.update_revision() + self.__value2 = value2 + + value = property( lambda self: self.__value, __set_value ) + value2 = property( lambda self: self.__value2, __set_value2 ) + diff --git a/controller/test/Test_controller.py b/controller/test/Test_controller.py index 8ad62a6..a1b3ed2 100644 --- a/controller/test/Test_controller.py +++ b/controller/test/Test_controller.py @@ -1,18 +1,178 @@ import cherrypy -from controller.Scheduler import Scheduler -from controller.Database import Database -from controller.test.Stub_view import Stub_view +from Stub_database import Stub_database +from Stub_view import Stub_view from config import Common from datetime import datetime from StringIO import StringIO class Test_controller( object ): + def __init__( self ): + from new_model.User import User + from new_model.Notebook import Notebook + from new_model.Note import Note + + # Since Stub_database isn't a real database and doesn't know SQL, replace some of the + # SQL-returning methods in User, Note, and Notebook to return functions that manipulate data in + # Stub_database directly instead. This is all a little fragile, but it's better than relying on + # the presence of a real database for unit tests. 
+ def sql_save_notebook( self, notebook_id, read_write, database ): + if self.object_id in database.user_notebook: + database.user_notebook[ self.object_id ].append( ( notebook_id, read_write ) ) + else: + database.user_notebook[ self.object_id ] = [ ( notebook_id, read_write ) ] + + User.sql_save_notebook = lambda self, notebook_id, read_write = False: \ + lambda database: sql_save_notebook( self, notebook_id, read_write, database ) + + def sql_load_notebooks( self, parents_only, database ): + notebooks = [] + notebook_tuples = database.user_notebook.get( self.object_id ) + + if not notebook_tuples: return None + + for notebook_tuple in notebook_tuples: + ( notebook_id, read_write ) = notebook_tuple + notebook = database.objects.get( notebook_id )[ -1 ] + notebook._Notebook__read_write = read_write + if parents_only and notebook.trash_id is None: + continue + notebooks.append( notebook ) + + return notebooks + + User.sql_load_notebooks = lambda self, parents_only = False: \ + lambda database: sql_load_notebooks( self, parents_only, database ) + + def sql_load_by_username( username, database ): + users = [] + + for ( object_id, obj_list ) in database.objects.items(): + obj = obj_list[ -1 ] + if isinstance( obj, User ) and obj.username == username: + users.append( obj ) + + return users + + User.sql_load_by_username = staticmethod( lambda username: \ + lambda database: sql_load_by_username( username, database ) ) + + def sql_load_by_email_address( email_address, database ): + users = [] + + for ( object_id, obj_list ) in database.objects.items(): + obj = obj_list[ -1 ] + if isinstance( obj, User ) and obj.email_address == email_address: + users.append( obj ) + + return users + + User.sql_load_by_email_address = staticmethod( lambda email_address: \ + lambda database: sql_load_by_email_address( email_address, database ) ) + + def sql_calculate_storage( self, database ): + return ( 17, 3, 4, 22 ) # rather than actually calculating anything, return arbitrary numbers + 
+ User.sql_calculate_storage = lambda self: \ + lambda database: sql_calculate_storage( self, database ) + + def sql_has_access( self, notebook_id, read_write, database ): + for ( user_id, notebook_tuples ) in database.user_notebook.items(): + for notebook_tuple in notebook_tuples: + ( db_notebook_id, db_read_write ) = notebook_tuple + + if self.object_id == user_id and notebook_id == db_notebook_id: + if read_write is True and db_read_write is False: + return False + return True + + return False + + User.sql_has_access = lambda self, notebook_id, read_write = False: \ + lambda database: sql_has_access( self, notebook_id, read_write, database ) + + def sql_load_revisions( self, database ): + note_list = database.objects.get( self.object_id ) + if not note_list: return None + + revisions = [ note.revision for note in note_list ] + return revisions + + Note.sql_load_revisions = lambda self: \ + lambda database: sql_load_revisions( self, database ) + + def sql_load_notes( self, database ): + notes = [] + + for ( object_id, obj_list ) in database.objects.items(): + obj = obj_list[ -1 ] + if isinstance( obj, Note ) and obj.notebook_id == self.object_id: + notes.append( obj ) + + notes.sort( lambda a, b: -cmp( a.revision, b.revision ) ) + return notes + + Notebook.sql_load_notes = lambda self: \ + lambda database: sql_load_notes( self, database ) + + def sql_load_startup_notes( self, database ): + notes = [] + + for ( object_id, obj_list ) in database.objects.items(): + obj = obj_list[ -1 ] + if isinstance( obj, Note ) and obj.notebook_id == self.object_id and obj.startup: + notes.append( obj ) + + return notes + + Notebook.sql_load_startup_notes = lambda self: \ + lambda database: sql_load_startup_notes( self, database ) + + def sql_load_note_by_title( self, title, database ): + notes = [] + + for ( object_id, obj_list ) in database.objects.items(): + obj = obj_list[ -1 ] + if isinstance( obj, Note ) and obj.notebook_id == self.object_id and obj.title == title: + 
notes.append( obj ) + + return notes + + Notebook.sql_load_note_by_title = lambda self, title: \ + lambda database: sql_load_note_by_title( self, title, database ) + + def sql_search_notes( self, search_text, database ): + notes = [] + search_text = search_text.lower() + + for ( object_id, obj_list ) in database.objects.items(): + obj = obj_list[ -1 ] + if isinstance( obj, Note ) and obj.notebook_id == self.object_id and \ + search_text in obj.contents.lower(): + notes.append( obj ) + + return notes + + Notebook.sql_search_notes = lambda self, search_text: \ + lambda database: sql_search_notes( self, search_text, database ) + + def sql_highest_rank( self, database ): + max_rank = -1 + + for ( object_id, obj_list ) in database.objects.items(): + obj = obj_list[ -1 ] + if isinstance( obj, Note ) and obj.notebook_id == self.object_id and obj.rank > max_rank: + max_rank = obj.rank + + return max_rank + + Notebook.sql_highest_rank = lambda self: \ + lambda database: sql_highest_rank( self, database ) + def setUp( self ): from controller.Root import Root cherrypy.lowercase_api = True - self.scheduler = Scheduler() - self.database = Database( self.scheduler, database_path = None ) + self.database = Stub_database() self.settings = { u"global": { u"luminotes.http_url" : u"http://luminotes.com", @@ -33,7 +193,7 @@ class Test_controller( object ): }, } - cherrypy.root = Root( self.scheduler, self.database, self.settings ) + cherrypy.root = Root( self.database, self.settings ) cherrypy.config.update( Common.settings ) cherrypy.config.update( { u"server.log_to_screen": False } ) cherrypy.server.start( init_only = True, server_class = None ) @@ -45,7 +205,6 @@ class Test_controller( object ): def tearDown( self ): cherrypy.server.stop() - self.scheduler.shutdown() def http_get( self, http_path, headers = None, session_id = None, pretend_https = False ): """ @@ -64,7 +223,7 @@ class Test_controller( object ): proxy_ip = self.settings[ "global" ].get( u"luminotes.http_proxy_ip" ) 
request = cherrypy.server.request( ( proxy_ip, 1234 ), u"127.0.0.5" ) - response = request.run( "GET %s HTTP/1.0" % http_path, headers = headers, rfile = StringIO() ) + response = request.run( "GET %s HTTP/1.0" % str( http_path ), headers = headers, rfile = StringIO() ) session_id = response.simple_cookie.get( u"session_id" ) if session_id: session_id = session_id.value @@ -103,7 +262,7 @@ class Test_controller( object ): headers.append( ( u"Cookie", "session_id=%s" % session_id ) ) # will break if unicode is used for the value request = cherrypy.server.request( ( u"127.0.0.1", 1234 ), u"127.0.0.5" ) - response = request.run( "POST %s HTTP/1.0" % http_path, headers = headers, rfile = StringIO( post_data ) ) + response = request.run( "POST %s HTTP/1.0" % str( http_path ), headers = headers, rfile = StringIO( post_data ) ) session_id = response.simple_cookie.get( u"session_id" ) if session_id: session_id = session_id.value diff --git a/controller/test/Test_database.py b/controller/test/Test_database.py index 1cb7e28..8cf085e 100644 --- a/controller/test/Test_database.py +++ b/controller/test/Test_database.py @@ -1,323 +1,188 @@ +from pytz import utc +from pysqlite2 import dbapi2 as sqlite +from datetime import datetime +from Stub_object import Stub_object from controller.Database import Database -from controller.Scheduler import Scheduler -from model.Persistent import Persistent - - -class Some_object( Persistent ): - def __init__( self, object_id, value, value2 = None, secondary_id = None ): - Persistent.__init__( self, object_id, secondary_id ) - self.__value = value - self.__value2 = value2 - - def __set_value( self, value ): - self.update_revision() - self.__value = value - - def __set_value2( self, value2 ): - self.update_revision() - self.__value2 = value2 - - value = property( lambda self: self.__value, __set_value ) - value2 = property( lambda self: self.__value2, __set_value2 ) class Test_database( object ): - def __init__( self, clear_cache = True ): - 
self.clear_cache = clear_cache - def setUp( self ): - self.scheduler = Scheduler() - self.database = Database( self.scheduler ) - next_id = None + # make an in-memory sqlite database to use in place of PostgreSQL during testing + self.connection = sqlite.connect( ":memory:", detect_types = sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES ) + cursor = self.connection.cursor() + cursor.execute( Stub_object.sql_create_table() ) + + self.database = Database( self.connection ) def tearDown( self ): self.database.close() - self.scheduler.shutdown() def test_save_and_load( self ): - def gen(): - basic_obj = Some_object( object_id = "5", value = 1 ) - original_revision = basic_obj.revision + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision - self.database.save( basic_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - self.database.load( basic_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) + self.database.save( basic_obj ) + obj = self.database.load( Stub_object, basic_obj.object_id ) - assert obj.object_id == basic_obj.object_id - assert obj.revision == original_revision - assert obj.revisions_list == [ original_revision ] - assert obj.value == basic_obj.value + assert obj.object_id == basic_obj.object_id + assert obj.revision.replace( tzinfo = utc ) == original_revision + assert obj.value == basic_obj.value - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) + def test_save_and_load_without_commit( self ): + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision - def test_complex_save_and_load( self ): - def gen(): - basic_obj = Some_object( object_id = "7", value = 2 ) - basic_original_revision = basic_obj.revision - complex_obj = Some_object( object_id = "6", value = basic_obj ) - complex_original_revision = complex_obj.revision + self.database.save( basic_obj, commit = False ) + 
self.connection.rollback() # if commit wasn't called, this should back out the save + obj = self.database.load( Stub_object, basic_obj.object_id ) - self.database.save( complex_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - self.database.load( complex_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) - if self.clear_cache: self.database.clear_cache() + assert obj == None - assert obj.object_id == complex_obj.object_id - assert obj.revision == complex_original_revision - assert obj.revisions_list == [ complex_original_revision ] - assert obj.value.object_id == basic_obj.object_id - assert obj.value.value == basic_obj.value - assert obj.value.revision == basic_original_revision - assert obj.value.revisions_list == [ basic_original_revision ] + def test_save_and_load_with_explicit_commit( self ): + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision - self.database.load( basic_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) + self.database.save( basic_obj, commit = False ) + self.database.commit() + self.connection.rollback() # should have no effect because of the call to commit + obj = self.database.load( Stub_object, basic_obj.object_id ) - assert obj.object_id == basic_obj.object_id - assert obj.value == basic_obj.value - assert obj.revision == basic_original_revision - assert obj.revisions_list == [ basic_original_revision ] + assert obj.object_id == basic_obj.object_id + assert obj.revision.replace( tzinfo = utc ) == original_revision + assert obj.value == basic_obj.value - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) + def test_select_one( self ): + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision - def test_save_and_load_by_secondary( self ): - def gen(): - basic_obj = Some_object( object_id = "5", value = 1, secondary_id = u"foo" ) - 
original_revision = basic_obj.revision + self.database.save( basic_obj ) + obj = self.database.select_one( Stub_object, Stub_object.sql_load( basic_obj.object_id ) ) - self.database.save( basic_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - self.database.load( u"Some_object foo", self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) + assert obj.object_id == basic_obj.object_id + assert obj.revision.replace( tzinfo = utc ) == original_revision + assert obj.value == basic_obj.value - assert obj.object_id == basic_obj.object_id - assert obj.value == basic_obj.value - assert obj.revision == original_revision - assert obj.revisions_list == [ original_revision ] + def test_select_one_tuple( self ): + obj = self.database.select_one( tuple, Stub_object.sql_tuple() ) - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) + assert len( obj ) == 2 + assert obj[ 0 ] == 1 + assert obj[ 1 ] == 2 - def test_duplicate_save_and_load( self ): - def gen(): - basic_obj = Some_object( object_id = "9", value = 3 ) - basic_original_revision = basic_obj.revision - complex_obj = Some_object( object_id = "8", value = basic_obj, value2 = basic_obj ) - complex_original_revision = complex_obj.revision + def test_select_many( self ): + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision + basic_obj2 = Stub_object( object_id = "6", value = 2 ) + original_revision2 = basic_obj2.revision - self.database.save( complex_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - self.database.load( complex_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) - if self.clear_cache: self.database.clear_cache() + self.database.save( basic_obj ) + self.database.save( basic_obj2 ) + objs = self.database.select_many( Stub_object, Stub_object.sql_load_em_all() ) - assert obj.object_id == complex_obj.object_id - assert 
obj.revision == complex_original_revision - assert obj.revisions_list == [ complex_original_revision ] + assert len( objs ) == 2 + assert objs[ 0 ].object_id == basic_obj.object_id + assert objs[ 0 ].revision.replace( tzinfo = utc ) == original_revision + assert objs[ 0 ].value == basic_obj.value + assert objs[ 1 ].object_id == basic_obj2.object_id + assert objs[ 1 ].revision.replace( tzinfo = utc ) == original_revision2 + assert objs[ 1 ].value == basic_obj2.value - assert obj.value.object_id == basic_obj.object_id - assert obj.value.value == basic_obj.value - assert obj.value.revision == basic_original_revision - assert obj.value.revisions_list == [ basic_original_revision ] + def test_select_many_tuples( self ): + objs = self.database.select_many( tuple, Stub_object.sql_tuple() ) - assert obj.value2.object_id == basic_obj.object_id - assert obj.value2.value == basic_obj.value - assert obj.value2.revision == basic_original_revision - assert obj.value2.revisions_list == [ basic_original_revision ] + assert len( objs ) == 1 + assert len( objs[ 0 ] ) == 2 + assert objs[ 0 ][ 0 ] == 1 + assert objs[ 0 ][ 1 ] == 2 - assert obj.value == obj.value2 + def test_select_many_with_no_matches( self ): + objs = self.database.select_many( Stub_object, Stub_object.sql_load_em_all() ) - self.database.load( basic_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) - - assert obj.object_id == basic_obj.object_id - assert obj.value == basic_obj.value - assert obj.revision == basic_original_revision - assert obj.revisions_list == [ basic_original_revision ] - - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) + assert len( objs ) == 0 def test_save_and_load_revision( self ): - def gen(): - basic_obj = Some_object( object_id = "5", value = 1 ) - original_revision = basic_obj.revision + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision - self.database.save( basic_obj, self.scheduler.thread ) - yield 
Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() + self.database.save( basic_obj ) + basic_obj.value = 2 - basic_obj.value = 2 + self.database.save( basic_obj ) + obj = self.database.load( Stub_object, basic_obj.object_id ) - self.database.save( basic_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - self.database.load( basic_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) - if self.clear_cache: self.database.clear_cache() + assert obj.object_id == basic_obj.object_id + assert obj.revision.replace( tzinfo = utc ) == basic_obj.revision + assert obj.value == basic_obj.value - assert obj.object_id == basic_obj.object_id - assert obj.revision == basic_obj.revision - assert obj.revisions_list == [ original_revision, basic_obj.revision ] - assert obj.value == basic_obj.value + revised = self.database.load( Stub_object, basic_obj.object_id, revision = original_revision ) - self.database.load( basic_obj.object_id, self.scheduler.thread, revision = original_revision ) - revised = ( yield Scheduler.SLEEP ) + assert revised.object_id == basic_obj.object_id + assert revised.value == 1 + assert revised.revision.replace( tzinfo = utc ) == original_revision - assert revised.object_id == basic_obj.object_id - assert revised.value == 1 - assert revised.revision == original_revision - assert id( obj.revisions_list ) != id( revised.revisions_list ) - assert revised.revisions_list == [ original_revision ] + def test_execute( self ): + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) + self.database.execute( basic_obj.sql_create() ) + obj = self.database.load( Stub_object, basic_obj.object_id ) + + assert obj.object_id == basic_obj.object_id + assert obj.revision.replace( tzinfo = utc ) == original_revision + assert obj.value == basic_obj.value + + def 
test_execute_without_commit( self ): + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision + + self.database.execute( basic_obj.sql_create(), commit = False ) + self.connection.rollback() + obj = self.database.load( Stub_object, basic_obj.object_id ) + + assert obj == None + + def test_execute_with_explicit_commit( self ): + basic_obj = Stub_object( object_id = "5", value = 1 ) + original_revision = basic_obj.revision + + self.database.execute( basic_obj.sql_create(), commit = False ) + self.database.commit() + obj = self.database.load( Stub_object, basic_obj.object_id ) + + assert obj.object_id == basic_obj.object_id + assert obj.revision.replace( tzinfo = utc ) == original_revision + assert obj.value == basic_obj.value def test_load_unknown( self ): - def gen(): - basic_obj = Some_object( object_id = "5", value = 1 ) - self.database.load( basic_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) - - assert obj == None - - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) - - def test_reload( self ): - def gen(): - basic_obj = Some_object( object_id = "5", value = 1 ) - original_revision = basic_obj.revision - - self.database.save( basic_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - - def setstate( self, state ): - state[ "_Some_object__value" ] = 55 - self.__dict__.update( state ) - - Some_object.__setstate__ = setstate - - self.database.reload( basic_obj.object_id, self.scheduler.thread ) - yield Scheduler.SLEEP - delattr( Some_object, "__setstate__" ) - if self.clear_cache: self.database.clear_cache() - - self.database.load( basic_obj.object_id, self.scheduler.thread ) - obj = ( yield Scheduler.SLEEP ) - - assert obj.object_id == basic_obj.object_id - assert obj.value == 55 - assert obj.revision == original_revision - assert obj.revisions_list == [ original_revision ] - - g = gen() - self.scheduler.add( g ) - 
self.scheduler.wait_for( g ) - - def test_reload_revision( self ): - def gen(): - basic_obj = Some_object( object_id = "5", value = 1 ) - original_revision = basic_obj.revision - original_revision_id = basic_obj.revision_id() - - self.database.save( basic_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - - basic_obj.value = 2 - - self.database.save( basic_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - - def setstate( self, state ): - state[ "_Some_object__value" ] = 55 - self.__dict__.update( state ) - - Some_object.__setstate__ = setstate - - self.database.reload( original_revision_id, self.scheduler.thread ) - yield Scheduler.SLEEP - delattr( Some_object, "__setstate__" ) - if self.clear_cache: self.database.clear_cache() - - self.database.load( basic_obj.object_id, self.scheduler.thread, revision = original_revision ) - obj = ( yield Scheduler.SLEEP ) - - assert obj.object_id == basic_obj.object_id - assert obj.revision == original_revision - assert obj.revisions_list == [ original_revision ] - assert obj.value == 55 - - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) - - def test_size( self ): - def gen(): - basic_obj = Some_object( object_id = "5", value = 1 ) - original_revision = basic_obj.revision - - self.database.save( basic_obj, self.scheduler.thread ) - yield Scheduler.SLEEP - if self.clear_cache: self.database.clear_cache() - - size = self.database.size( basic_obj.object_id ) - - from cPickle import Pickler - from StringIO import StringIO - buffer = StringIO() - pickler = Pickler( buffer, protocol = -1 ) - pickler.dump( basic_obj ) - expected_size = len( buffer.getvalue() ) - - # as long as the size is close to the expected size, that's fine - assert abs( size - expected_size ) < 10 - - g = gen() - self.scheduler.add( g ) - self.scheduler.wait_for( g ) + basic_obj = Stub_object( object_id = "5", value = 1 ) + obj = 
self.database.load( Stub_object, basic_obj.object_id )
+ assert obj == None

 def test_next_id( self ):
- def gen():
- self.database.next_id( self.scheduler.thread )
- next_id = ( yield Scheduler.SLEEP )
- assert next_id
- prev_ids = [ next_id ]
+ next_id = self.database.next_id( Stub_object )
+ assert next_id
+ assert self.database.load( Stub_object, next_id )
+ prev_ids = [ next_id ]

- self.database.next_id( self.scheduler.thread )
- next_id = ( yield Scheduler.SLEEP )
- assert next_id
- assert next_id not in prev_ids
- prev_ids.append( next_id )
+ next_id = self.database.next_id( Stub_object )
+ assert next_id
+ assert next_id not in prev_ids
+ assert self.database.load( Stub_object, next_id )
+ prev_ids.append( next_id )

- self.database.next_id( self.scheduler.thread )
- next_id = ( yield Scheduler.SLEEP )
- assert next_id
- assert next_id not in prev_ids
+ next_id = self.database.next_id( Stub_object )
+ assert next_id
+ assert next_id not in prev_ids
+ assert self.database.load( Stub_object, next_id )

- g = gen()
- self.scheduler.add( g )
- self.scheduler.wait_for( g )
+ def test_next_id_without_commit( self ):
+ next_id = self.database.next_id( Stub_object, commit = False )
+ self.connection.rollback()
+ assert self.database.load( Stub_object, next_id ) == None

-
-class Test_database_without_clearing_cache( Test_database ):
- def __init__( self ):
- Test_database.__init__( self, clear_cache = False )
+ def test_next_id_with_explicit_commit( self ):
+ next_id = self.database.next_id( Stub_object, commit = False )
+ self.database.commit()
+ assert next_id
+ assert self.database.load( Stub_object, next_id )
diff --git a/controller/test/Test_notebooks.py b/controller/test/Test_notebooks.py
index 64a7efc..8f41209 100644
--- a/controller/test/Test_notebooks.py
+++ b/controller/test/Test_notebooks.py
@@ -2,10 +2,9 @@ import cherrypy
import cgi
from urllib import quote
from Test_controller import Test_controller
-from controller.Scheduler import Scheduler
-from 
model.Notebook import Notebook -from model.Note import Note -from model.User import User +from new_model.Notebook import Notebook +from new_model.Note import Note +from new_model.User import User class Test_notebooks( Test_controller ): @@ -23,57 +22,51 @@ class Test_notebooks( Test_controller ): self.anonymous = None self.session_id = None - thread = self.make_notebooks() - self.scheduler.add( thread ) - self.scheduler.wait_for( thread ) - - thread = self.make_users() - self.scheduler.add( thread ) - self.scheduler.wait_for( thread ) + self.make_notebooks() + self.make_users() + self.database.commit() def make_notebooks( self ): - self.database.next_id( self.scheduler.thread ) - self.trash = Notebook( ( yield Scheduler.SLEEP ), u"trash", ) - self.database.next_id( self.scheduler.thread ) - self.notebook = Notebook( ( yield Scheduler.SLEEP ), u"notebook", self.trash ) + self.trash = Notebook.create( self.database.next_id( Notebook ), u"trash" ) + self.database.save( self.trash, commit = False ) + self.notebook = Notebook.create( self.database.next_id( Notebook ), u"notebook", self.trash.object_id ) + self.database.save( self.notebook, commit = False ) - self.database.next_id( self.scheduler.thread ) - note_id = ( yield Scheduler.SLEEP ) - self.note = Note( note_id, u"
blah
" - new_note = Note( "55", title_with_tags + junk + more_junk ) + new_note = Note.create( "55", title_with_tags + junk + more_junk ) previous_revision = new_note.revision result = self.http_post( "/notebooks/save_note/", dict( @@ -766,8 +837,9 @@ class Test_notebooks( Test_controller ): assert result[ "new_revision" ] and result[ "new_revision" ] != previous_revision assert result[ "previous_revision" ] == None - assert self.user.storage_bytes > 0 - assert result[ "storage_bytes" ] == self.user.storage_bytes + user = self.database.load( User, self.user.object_id ) + assert user.storage_bytes > 0 + assert result[ "storage_bytes" ] == user.storage_bytes # make sure the new title is now loadable result = self.http_post( "/notebooks/load_note_by_title/", dict( @@ -789,7 +861,7 @@ class Test_notebooks( Test_controller ): # save a completely new note contents = "Previous revision from " + short_revision + "
" + note_text; } var startup = this.startup_notes[ id ]; - var editor = new Editor( id, this.notebook_id, note_text, deleted_from, revisions_list, read_write, startup, highlight, focus ); + var editor = new Editor( id, this.notebook_id, note_text, deleted_from_id, revision, read_write, startup, highlight, focus ); if ( this.read_write ) { connect( editor, "state_changed", this, "editor_state_changed" ); @@ -613,7 +620,7 @@ Wiki.prototype.editor_key_pressed = function ( editor, event ) { this.create_blank_editor( event ); // ctrl-h: hide note } else if ( code == 72 ) { - if ( !editor.deleted_from ) + if ( !editor.deleted_from_id ) this.hide_editor( event ); // ctrl-d: delete note } else if ( code == 68 ) { @@ -727,7 +734,7 @@ Wiki.prototype.delete_editor = function ( event, editor ) { if ( editor == this.focused_editor ) this.focused_editor = null; - if ( this.notebook.trash && !editor.empty() ) { + if ( this.notebook.trash_id && !editor.empty() ) { var undo_button = createDOM( "input", { "type": "button", "class": "message_button", @@ -735,7 +742,7 @@ Wiki.prototype.delete_editor = function ( event, editor ) { "title": "undo deletion" } ); var trash_link = createDOM( "a", { - "href": "/notebooks/" + this.notebook.trash.object_id + "?parent_id=" + this.notebook.object_id + "href": "/notebooks/" + this.notebook.trash_id + "?parent_id=" + this.notebook.object_id }, "trash" ); this.display_message( 'The note has been moved to the', [ trash_link, ". 
", undo_button ] ) var self = this; @@ -767,7 +774,7 @@ Wiki.prototype.undelete_editor_via_trash = function ( event, editor ) { if ( this.read_write && editor.read_write ) { var self = this; this.invoker.invoke( "/notebooks/undelete_note", "POST", { - "notebook_id": editor.deleted_from, + "notebook_id": editor.deleted_from_id, "note_id": editor.id }, function ( result ) { self.display_storage_usage( result.storage_bytes ); } ); } @@ -816,13 +823,12 @@ Wiki.prototype.save_editor = function ( editor, fire_and_forget ) { var self = this; if ( editor && editor.read_write && !editor.empty() ) { - var revisions = editor.revisions_list; this.invoker.invoke( "/notebooks/save_note", "POST", { "notebook_id": this.notebook_id, "note_id": editor.id, "contents": editor.contents(), "startup": editor.startup, - "previous_revision": revisions.length ? revisions[ revisions.length - 1 ] : "None" + "previous_revision": editor.revision ? editor.revision : "None" }, function ( result ) { self.update_editor_revisions( result, editor ); self.display_storage_usage( result.storage_bytes ); @@ -835,8 +841,8 @@ Wiki.prototype.update_editor_revisions = function ( result, editor ) { if ( !result.new_revision ) return; - var revisions = editor.revisions_list; - var client_previous_revision = revisions.length ? 
revisions[ revisions.length - 1 ] : null; + var client_previous_revision = editor.revision; + editor.revision = result.new_revision; // if the server's idea of the previous revision doesn't match the client's, then someone has // gone behind our back and saved the editor's note from another window @@ -854,11 +860,15 @@ Wiki.prototype.update_editor_revisions = function ( result, editor ) { self.compare_versions( event, editor, result.previous_revision ); } ); - revisions.push( result.previous_revision ); + if ( !editor.revisions_list || editor.revisions_list.length == 0 ) + return; + editor.revisions_list.push( result.previous_revision ); } // add the new revision to the editor's revisions list - revisions.push( result.new_revision ); + if ( !editor.revisions_list || editor.revisions_list.length == 0 ) + return; + editor.revisions_list.push( result.new_revision ); } Wiki.prototype.search = function ( event ) { @@ -898,7 +908,7 @@ Wiki.prototype.display_search_results = function ( result ) { } // otherwise, create an editor for the one note - this.create_editor( note.object_id, note.contents, note.deleted_from, note.revisions_list, undefined, this.read_write, true, true ); + this.create_editor( note.object_id, note.contents, note.deleted_from_id, note.revision, this.read_write, true, true ); return; } @@ -936,7 +946,7 @@ Wiki.prototype.display_search_results = function ( result ) { ); } - this.search_results_editor = this.create_editor( "search_results", "