The Assimilation Project  based on Assimilation version 1.1.7.1474836767
store.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 # vim: smartindent tabstop=4 shiftwidth=4 expandtab number colorcolumn=100
3 #
4 # This file is part of the Assimilation Project.
5 #
6 # Author: Alan Robertson <alanr@unix.sh>
7 # Copyright (C) 2013 - Assimilation Systems Limited
8 #
9 # Free support is available from the Assimilation Project community - http://assimproj.org
10 # Paid support is available from Assimilation Systems Limited - http://assimilationsystems.com
11 #
12 # The Assimilation software is free software: you can redistribute it and/or modify
13 # it under the terms of the GNU General Public License as published by
14 # the Free Software Foundation, either version 3 of the License, or
15 # (at your option) any later version.
16 #
17 # The Assimilation software is distributed in the hope that it will be useful,
18 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # GNU General Public License for more details.
21 #
22 # You should have received a copy of the GNU General Public License
23 # along with the Assimilation Project software. If not, see http://www.gnu.org/licenses/
24 #
25 #
26 # W0212 -- access to a protected member of a client class (we do this a lot)
27 # pylint: disable=W0212
28 '''
29 Store module - contains a transactional batch implementation of Nigel Small's
30 Object-Graph-Mapping API (or something a lot like it)
31 '''
32 import re, inspect, weakref
33 from collections import namedtuple
34 #import traceback
35 import sys # only for stderr
36 from datetime import datetime, timedelta
37 import py2neo
38 from py2neo import neo4j, GraphError
39 from assimevent import AssimEvent
40 
41 # R0902: Too many instance attributes (17/10)
42 # R0904: Too many public methods (27/20)
43 # pylint: disable=R0902,R0904
44 class Store(object):
45  '''This 'Store' class is a transaction-oriented implementation of Nigel Small's
46  OGM (Object-Graph-Mapping) API - with a few extensions and a few things not implemented.
47 
48  Unimplemented APIs
49  -----------------
50  The following member functions aren't provided:
51  is_saved() - replaced by the transaction_pending property
52 
53  Some interesting extensions:
54  ----------------------------
55  - You can tell the Store constructor things about your Classes and Indexes
56  makes handling indexes simpler and less error prone. This affects the
57  save() method and makes it usable far more often.
58  - All updates are happen in a batch job - as a single transaction.
59  - You never need to call save once an object was created. We
60  track changes to the attributes.
61 
62  New methods:
63  -----------
64  commit saves all modifications in a single transaction
65  load_in_related load objects we're related to by incoming relationships
66  load_cypher_nodes generator which yields a vector of sametype nodes from a cypher query
67  load_cypher_node return a single object from a cypher query
68  load_cypher_query return iterator with objects for fields
69  separate_in separate objects we're related to by incoming relationships
70  node returns the neo4j.Node object associated with an object
71  id returns the id of the neo4j.Node associated with an object
72  is_uniqueindex returns True if the given index name is known to be unique
73  __str__ formats information about this Store
74  transaction_pending -- Property: True means a transaction is pending
75  stats a data member containing statistics in a dict
76  reset_stats Reset statistics counters and timers
77 
78  The various save functions do nothing immediately. Updates are delayed until
79  the commit member function is called.
80 
81  Restrictions:
82  -------------
83  You can't delete something in the same transaction that you created it.
84  This could probably be fixed, but would take some effort, and seems unlikely
85  to be useful.
86 
87  Objects associated with Nodes must be subclasses of object
88 
89  Caveats, Warnings and so on...
90  ------------------------------
 91  You can delete the same relationship or node multiple times in a transaction.
92 
93  Attributes beginning with _ are not replicated as Node properties.
94 
95  There are various times when a constructor is called to create an object from a
96  node. Those 'constructors' can be factory functions that construct the right kind
97  of object for the type of node involved.
98 
99  Such constructors are called with the arguments which correspond to the Node
100  properties - but only those which they will legally take (according to python
101  introspection). It is assumed that argument names correspond to attribute
102  (Node property) names.
103 
104  Any remaining properties not created by the constructor are assigned to the object
105  as attributes. This is likely not what Nigel did, but it seems sensible.
106 
107  Probably should have created some new exception type for a few of our cases.
108  I'm not compatible with Nigel in terms of exceptions raised. We mostly
109  raise ValueError()...
110 
111  In particular, it is possible for data in the database to be incompatible with
112  object constructors which would be a bad thing worth recognizing -- but I don't
113  do anything special for this case at the moment.
114  '''
115  LUCENE_RE = re.compile(r'([\-+&\|!\(\)\{\}[\]^"~\*?:\\])')
116  LUCENE_RE = re.compile(r'([:[\]])')
117 
118  debug = False
119  log = None
120 
121  def __init__(self, db, uniqueindexmap=None, classkeymap=None, readonly=False):
122  '''
123  Constructor for Transactional Write (Batch) Store objects
124  ---------
125  Parameters:
126  db - Database to associate with this object
127  uniqueindexmap - Dict of indexes, True means its a unique index, False == nonunique
128  classkeymap - Map of classes to index attributes - indexed by Class or Class name
129  Values are another Dict with these values:
130  'index': name of index
131  'key': constant key value
132  'kattr': object attribute for key
133  'value': constant key 'value'
134  'vattr': object attribute for key 'value'
135  '''
136  self.db = db
137  self.readonly = readonly
138  self.stats = {}
139  self.reset_stats()
140  self.clients = {}
141  self.newrels = []
142  self.deletions = []
143  self.classes = {}
144  self.weaknoderefs = {}
145  if classkeymap is None:
146  classkeymap = {}
147  if uniqueindexmap is None:
148  uniqueindexmap = {}
149  for classkey in classkeymap.keys():
150  uniqueindexmap[classkey] = True
151  self.uniqueindexmap = uniqueindexmap
152  self.batch = None
153  self.batchindex = None
154  if len(classkeymap) > 0 and not isinstance(classkeymap.keys()[0], str):
155  # Then the map should be indexed by the classes themselves
156  newmap = {}
157  for cls in classkeymap.keys():
158  newmap[cls.__name__] = classkeymap[cls]
159  classkeymap = newmap
160  self.classkeymap = classkeymap
167 
168 
    def __str__(self):
        'Render our Store object as a string for debugging'
        ret = '{\n\tdb: %s' % self.db
        ret += ',\n\tclasses: %s' % self.classes
        # The next two sections are deliberately disabled ('if False') - too
        # verbose for routine debugging output.
        # NOTE(review): the second disabled condition tests uniqueindexmap but
        # prints classkeymap - looks like a copy/paste slip; harmless while
        # both remain disabled.
        if False and self.uniqueindexmap:
            ret += ',\n\tuniqueindexmap: %s' % self.uniqueindexmap
        if False and self.uniqueindexmap:
            ret += ',\n\tclasskeymap: %s' % self.classkeymap
        ret += '\n\tbatchindex: %s' % self.batchindex
        # Format each transaction worklist as a bracketed, comma-separated list
        for attr in ('clients', 'newrels', 'deletions'):
            avalue = getattr(self, attr)
            acomma = '['
            s = "\n"
            for each in avalue:
                s += ('%s%s' % (acomma, each))
                acomma = ', '
            ret += ",\n\t%10s: %s" % (attr, s)
        ret += '\n%s\n' % self.fmt_dirty_attrs()
        ret += '\n\tweaknoderefs: %s' % self.weaknoderefs

        ret += '\n\tstats: %s' % self.stats
        ret += '\n\tbatch: %s' % self.batch
        ret += '\n}'
        return ret
193 
194  @staticmethod
195  def lucene_escape(query):
196  'Returns a string with the lucene special characters escaped'
197  return Store.LUCENE_RE.sub(r'\\\1', query)
198 
199  @staticmethod
200  def id(subj):
201  'Returns the id of the neo4j.Node associated with the given object'
202  if subj.__store_node.bound:
203  return subj.__store_node._id
204  return None
205 
206  @staticmethod
207  def has_node(subj):
208  'Returns True if this object has an associated Neo4j Node object'
209  return hasattr(subj, '_Store__store_node')
210 
211  @staticmethod
212  def getstore(subj):
213  'Returns the Store associated with this object'
214  return subj.__store if hasattr(subj, '_Store__store') else None
215 
216  @staticmethod
217  def is_abstract(subj):
218  'Returns True if the underlying database node is Abstract'
219  if not hasattr(subj, '_Store__store_node'):
220  return True
221  return not subj.__store_node.bound
222 
223  @staticmethod
224  def bound(subj):
225  'Returns True if the underlying database node is bound (i.e., not abstract)'
226  return subj.__store_node.bound
227 
228  def is_uniqueindex(self, index_name):
229  'Return True if this index is known to be a unique index'
230  if self.uniqueindexmap is not None and index_name in self.uniqueindexmap:
231  return self.uniqueindexmap[index_name]
232  return False
233 
    # For debugging...
    def dump_clients(self):
        '''Dump out all our client objects and their supported attribute
        values and states to stderr.  (Python 2 print-chevron syntax - this
        module predates Python 3.)'''
        for client in self.clients:
            print >> sys.stderr, ('Client %s:' % client)
            for attr in Store._safe_attr_names(client):
                # 'Dirty' == changed since the last commit
                if attr in client.__store_dirty_attrs.keys():
                    print >> sys.stderr, ('%10s: Dirty - %s' % (attr, client.__dict__[attr]))
                else:
                    print >> sys.stderr, ('%10s: Clean - %s' % (attr, client.__dict__[attr]))
244 
245  def fmt_dirty_attrs(self):
246  'Format dirty our client objects and their modified attribute values and states'
247  result='"Dirty Attrs": {'
248  for client in self.clients:
249  namedyet = False
250  for attr in Store._safe_attr_names(client):
251  if not hasattr(client, '__store_dirty_attrs'):
252  continue
253  if attr in client.__store_dirty_attrs.keys():
254  if not namedyet:
255  result += ('Client %s:%s: {' % (client, Store.id(client)))
256  namedyet = True
257  result += ('%10s: %s,' % (attr, client.__dict__[attr]))
258  if namedyet:
259  result += '}\n'
260  result += '}'
261  return result
262 
263  def save_indexed(self, index_name, key, value, *subj):
264  'Save the given (new) object as an indexed node'
265  if self.readonly:
266  raise RuntimeError('Attempt to save an object to a read-only store')
267  if not isinstance(subj, tuple) and not isinstance(subj, list):
268  subj = (subj,)
269  for obj in subj:
270  self._register(obj, neo4j.Node(**Store.safe_attrs(obj))
271  , index=index_name, key=key, value=value, unique=False)
272 
273  def save_unique(self, index_name, key, value, subj):
274  'Save the given new object as a uniquely indexed node'
275  self.save_indexed(index_name, key, value, subj)
276  # Override save_indexed's judgment that it's not unique...
277  subj.__store_index_unique = True
278 
    def save(self, subj, node=None):
        '''Save an object:
        - into a new node (node is None)
        - into an existing node (node supplied)

        It will be indexed if its class is a known indexed class,
        not indexed if its class is not a known indexed class.

        If the index is known to be a unique index, then it will
        be saved unique - otherwise it won't be unique.

        Returns subj - except on the explicit-node path, which returns None.
        NOTE(review): that inconsistency looks accidental, but callers may
        depend on it, so it is left as-is.
        '''
        if self.readonly:
            raise RuntimeError('Attempt to save an object to a read-only store')
        if node is not None:
            # Pair the object with the caller-supplied (existing) node
            if subj in self.clients:
                raise ValueError('Cannot save existing node into a new node')
            self._register(subj, node=node)
            return

        # Figure out all the indexed stuff...
        cls = subj.__class__
        if cls.__name__ not in self.classkeymap:
            # Not an indexed object...
            if subj not in self.clients:
                self._register(subj, neo4j.Node(**Store.safe_attrs(subj)))
            return subj
        (index, key, value) = self._get_idx_key_value(cls, subj.__dict__)

        # Now save it...
        if self.is_uniqueindex(index):
            self.save_unique(index, key, value, subj)
        else:
            self.save_indexed(index, key, value, subj)
        return subj
313 
    def delete(self, subj):
        '''Queue deletion of the saved object and all its relationships from
        the database (performed at commit time).
        Raises ValueError if the object was never registered or its node is
        still abstract; RuntimeError if this Store is read-only.'''
        if not hasattr(subj, '_Store__store_node'):
            raise ValueError('Object not associated with the Store system')
        if self.readonly:
            raise RuntimeError('Attempt to delete an object from a read-only store')
        node = subj.__store_node
        if not node.bound:
            raise ValueError('Node cannot be abstract')
        # Queue removal of relationships in both directions first - a node
        # with remaining relationships cannot be deleted.
        self.separate(subj)
        self.separate_in(subj)
        self.deletions.append(subj)
326 
327  def refresh(self, subj):
328  'Refresh the information in the given object from the database'
329  node = self.db.node(subj.__store_node._id)
330  return self._construct_obj_from_node(node, subj.__class__)
331 
332 
333  def load_indexed(self, index_name, key, value, cls):
334  '''
335  Return the specified set of 'cls' objects from the given index
336  ---------
337  Parameters:
338  index_name - name of index to retrieve objects from
339  key - key value of nodes to be retrieved
340  value - 'value' of nodes to be retrieved
341  cls - a class to construct -- or a function to call
342  which constructs the desired object
343  '''
344 
345  idx = self.db.legacy.get_index(neo4j.Node, index_name)
346  nodes = idx.get(key, value)
347  #print ('idx["%s",%s].get("%s", "%s") => %s' % (index_name, idx, key, value, nodes))
348  ret = []
349  for node in nodes:
350  ret.append(self._construct_obj_from_node(node, cls))
351  #print ('load_indexed: returning %s' % ret[0].__dict__)
352  return ret
353 
    def load(self, cls, **clsargs):
        '''Load a pre-existing object from its constructor arguments.
        Only valid for classes with a known *unique* index.
        Returns the in-memory copy if we already track one, otherwise an
        object built from the indexed node - or None if nothing matches.
        '''
        if cls.__name__ not in self.classkeymap:
            print >> sys.stderr, (self.classkeymap)
            raise ValueError("Class [%s] does not have a known index [%s]"
                             % (cls.__name__, self.classkeymap))
        try:
            # Cheap path: treat the kwargs dict itself as the subject
            (index_name, idxkey, idxvalue) = self._get_idx_key_value(cls, clsargs, subj=clsargs)
        except KeyError:
            # On rare occasions, constructors create some default "unique" values
            # which don't appear as arguments, then the call above fails with KeyError.
            # The rest of the time, just using subj=clsargs is cheaper...
            # There are no cases where it produces different results.
            # If we're called by load_or_save() it will call the constructor again,
            # so it seems good to avoid that.
            subj = self.save(self.callconstructor(cls, clsargs))
            (index_name, idxkey, idxvalue) = self._get_idx_key_value(cls, clsargs, subj=subj)
        if not self.is_uniqueindex(index_name):
            raise ValueError("Class [%s] is not a unique indexed class" % cls)

        # See if we can find this node in memory somewhere...
        ret = self._localsearch(cls, idxkey, idxvalue)
        if ret is not None:
            return ret

        try:
            node = self.db.legacy.get_indexed_node(index_name, idxkey, idxvalue)
        except GraphError:
            # Index lookup failed - treat as "no such object"
            return None
        return self._construct_obj_from_node(node, cls, clsargs) if node is not None else None
385 
386  def load_or_create(self, cls, **clsargs):
387  '''Analogous to 'save' - for loading an object or creating it if it
388  doesn't exist
389  '''
390  obj = self.load(cls, **clsargs)
391  if obj is not None:
392  return obj
393  return self.save(self.callconstructor(cls, clsargs))
394 
395 
    def relate(self, subj, rel_type, obj, properties=None):
        '''Queue creation of a 'rel_type' relationship subj-[:rel_type]->obj
        (created at commit time).  'properties' is an optional dict of
        relationship properties.'''
        assert not isinstance(obj, str)
        if self.readonly:
            raise RuntimeError('Attempt to relate objects in a read-only store')
        self.newrels.append({'from':subj, 'to':obj, 'type':rel_type, 'props':properties})
        # Debug-only tracing (Python 2 print-chevron syntax)
        if Store.debug:
            print >> sys.stderr, 'NEW RELATIONSHIP FROM %s to %s' % (subj, obj)
            if not Store.is_abstract(subj):
                print >> sys.stderr, 'FROM id is %s' % Store.id(subj)
            if not Store.is_abstract(obj):
                print >> sys.stderr, 'TO id is %s' % Store.id(obj)
408 
409  def relate_new(self, subj, rel_type, obj, properties=None):
410  '''Define a 'rel_type' relationship subj-[:rel_type]->obj'''
411  subjnode = subj.__store_node
412  objnode = obj.__store_node
413 
414  # Check for relationships created in this transaction...
415  for rel in self.newrels:
416  if rel['from'] is subj and rel['to'] is obj and rel['type'] == rel_type:
417  return
418  # Check for pre-existing relationships
419  if objnode.bound and subjnode.bound:
420  rels = [rel for rel in subjnode.match_outgoing(rel_type, objnode)]
421  if len(rels) > 0:
422  return
423  self.relate(subj, rel_type, obj, properties)
424 
    def separate(self, subj, rel_type=None, obj=None):
        '''Queue deletion of subj's outgoing relationships of the given type
        (optionally restricted to those ending at obj).
        rel_type=None and obj=None act as wildcards.'''
        fromnode = subj.__store_node
        if not fromnode.bound:
            raise ValueError('Subj Node cannot be abstract')
        if obj is not None:
            # From here on, 'obj' is the *node*, not the client object
            obj = obj.__store_node
            if not obj.bound:
                raise ValueError('Obj Node cannot be abstract')

        # No errors - give it a shot!
        rels = subj.__store_node.match_outgoing(rel_type, obj)
        for rel in rels:
            if Store.debug:
                print ('DELETING RELATIONSHIP %s of type %s: %s' % (rel._id, rel_type, rel))
            if obj is not None:
                assert rel.end_node._id == obj._id
            self.deletions.append(rel)
443 
    def separate_in(self, subj, rel_type=None, obj=None):
        '''Queue deletion of subj's incoming relationships of the given type.
        rel_type=None acts as a wildcard.
        NOTE(review): 'obj' is validated but never used to filter the match
        (unlike separate()) - so *all* incoming relationships of rel_type are
        deleted regardless of obj.  Confirm before passing obj here.'''
        fromnode = subj.__store_node
        if not fromnode.bound:
            raise ValueError('Node cannot be abstract')
        if obj is not None:
            obj = obj.__store_node
            if not obj.bound:
                raise ValueError('Node cannot be abstract')

        # No errors - give it a shot!
        rels = subj.__store_node.match_incoming(rel_type)
        for rel in rels:
            self.deletions.append(rel)
458 
459  def load_related(self, subj, rel_type, cls):
460  'Load all outgoing-related nodes with the specified relationship type'
461  # It would be really nice to be able to filter on relationship properties
462  # All it would take would be to write a little Cypher query
463  # Of course, that still leaves the recently-created case unhandled...
464 
465  if Store.is_abstract(subj):
466  # @TODO Should search recently created relationships...
467  #raise ValueError('Node to load related to cannot be abstract')
468  return
469  rels = subj.__store_node.match_outgoing(rel_type)
470  for rel in rels:
471  yield self._construct_obj_from_node(rel.end_node, cls)
472 
473  def load_in_related(self, subj, rel_type, cls):
474  'Load all incoming-related nodes with the specified relationship type'
475  if not subj.__store_node.bound:
476  # All it would take would be to write a little Cypher query
477  # Of course, that still leaves the recently-created case unhandled...
478  # @TODO Should search recently created relationships...
479  raise ValueError('Node to load related from cannot be abstract')
480  rels = subj.__store_node.match_incoming(rel_type)
481  for rel in rels:
482  yield (self._construct_obj_from_node(rel.start_node, cls))
483 
    def load_cypher_nodes(self, querystr, cls, params=None, maxcount=None, debug=False):
        '''Execute the given query that yields a single column of nodes
        all of the same Class (cls) and yield each of those Objects in turn
        through an iterator (generator).
        params   - Cypher query parameters
        maxcount - upper bound on the number of objects yielded
        '''
        count = 0
        if params is None:
            params = {}
        if debug:
            print >> sys.stderr, 'Starting query %s(%s)' % (querystr, params)
        for row in self.db.cypher.stream(querystr, **params):
            if debug:
                print >> sys.stderr, 'Received Row from stream: %s' % (row)
            # A row may carry several columns; yield one object per column
            for key in row.__producer__.columns:
                if debug:
                    print >> sys.stderr, 'looking for column %s' % (key)
                node = getattr(row, key)
                if node is None:
                    if debug:
                        print >> sys.stderr, 'getattr(%s) failed' % key
                    continue
                yval = self.constructobj(cls, node)
                if debug:
                    print >> sys.stderr, 'yielding row %d (%s)' % (count, yval)
                yield yval
                count += 1
                if maxcount is not None and count >= maxcount:
                    # NOTE(review): 'break' only exits the column loop, so the
                    # outer row loop keeps streaming and could yield past
                    # maxcount on multi-column results - confirm whether this
                    # was meant to be 'return'.
                    if debug:
                        print >> sys.stderr, 'quitting on maxcount (%d)' % count
                    break
        if debug:
            print >> sys.stderr, 'quitting on end of query output (%d)' % count
        return
516 
517  def load_cypher_node(self, query, cls, params=None):
518  'Load a single node as a result of a Cypher query'
519  if params is None:
520  params = {}
521  for node in self.load_cypher_nodes(query, cls, params, maxcount=1):
522  return node
523  return None
524 
    def load_cypher_query(self, querystr, clsfact, params=None, maxcount=None):
        '''Iterator returning results from a query translated into classes, and so on
        Each iteration returns a namedtuple with node fields as classes, etc.
        Note that 'clsfact' must be a class "factory" capable of translating any
        type of node encountered into the corresponding objects.
        Return result is a generator.
        '''
        count = 0
        if params is None:
            params = {}
        rowfields = None
        rowclass = None
        for row in self.db.cypher.stream(querystr, **params):
            if rowfields is None:
                # First row: learn the column names and build the record type
                rowfields = row.__producer__.columns
                rowclass = namedtuple('FilteredRecord', rowfields)
            # Translate each column value (nodes -> objects, etc.)
            yieldval = []
            for attr in rowfields:
                yieldval.append(self._yielded_value(getattr(row, attr), clsfact))
            count += 1
            if maxcount is not None and count > maxcount:
                return
            yield rowclass._make(yieldval)
548 
549  def _yielded_value(self, value, clsfact):
550  'Return the value for us to yield - supporting collection objects'
551  if isinstance(value, neo4j.Node):
552  obj = self.constructobj(clsfact, value)
553  return obj
554  elif isinstance(value, neo4j.Relationship):
555  from graphnodes import NeoRelationship
556  return NeoRelationship(value)
557  elif isinstance(value, neo4j.Path):
558  return '''Sorry, Path values aren't yet supported'''
559  elif isinstance(value, (list, tuple)):
560  ret = []
561  for elem in value:
562  ret.append(self._yielded_value(elem, clsfact))
563  return ret
564  else:
565  # Integers, strings, None, etc.
566  return value
567 
568 
569  @property
571  'Return True if we have pending transaction work that needs flushing out'
572  return (len(self.clients) + len(self.newrels) + len(self.deletions)) > 0
573 
    @staticmethod
    def callconstructor(constructor, kwargs):
        '''Call a constructor (or function) in a (hopefully) correct way -
        passing only the keyword arguments it can legally accept (determined
        by introspection), then attaching any left-over kwargs to the result
        as plain attributes.  Returns the constructed object.'''
        try:
            args, _unusedvarargs, varkw, _unuseddefaults = inspect.getargspec(constructor)
        except TypeError:
            # Not a plain function (e.g. a class) - introspect __init__ instead
            args, _unusedvarargs, varkw, _unuseddefaults = inspect.getargspec(constructor.__init__)
        newkwargs = {}
        extraattrs = {}
        if varkw: # Allows any keyword arguments
            newkwargs = kwargs
        else: # Only allows some keyword arguments
            for arg in kwargs:
                if arg in args:
                    newkwargs[arg] = kwargs[arg]
                else:
                    extraattrs[arg] = kwargs[arg]
        ret = constructor(**newkwargs)

        # Make sure the attributes match the desired values
        for attr in kwargs:
            kwa = kwargs[attr]
            if attr in extraattrs:
                # object.__setattr__ bypasses our hook so nothing is marked dirty
                if not hasattr(ret, attr) or getattr(ret, attr) != kwa:
                    object.__setattr__(ret, attr, kwa)
            elif not hasattr(ret, attr) or getattr(ret, attr) is None:
                # If the constructor set this attribute to a value, but it doesn't match the db
                # then we let it stay as the constructor set it
                # We gave this value to the constructor as a keyword argument.
                # Sometimes constructors need to do that...
                object.__setattr__(ret, attr, kwa)
        return ret
607 
608  def constructobj(self, constructor, node):
609  'Create/construct an object from a Graph node'
610  kwargs = node.get_properties()
611  #print >> sys.stderr, 'constructobj NODE PROPERTIES', kwargs
612  subj = Store.callconstructor(constructor, kwargs)
613  #print >> sys.stderr, 'constructobj CONSTRUCTED NODE ', subj
614  cls = subj.__class__
615  (index_name, idxkey, idxvalue) = self._get_idx_key_value(cls, {}, subj=subj)
616  if not self.is_uniqueindex(index_name):
617  raise ValueError("Class 'cls' must be a unique indexed class [%s]", cls)
618  local = self._localsearch(cls, idxkey, idxvalue)
619  if local is not None:
620  return local
621  else:
622  self._register(subj, node=node)
623  return subj
624 
625  @staticmethod
626  def _safe_attr_names(subj):
627  'Return the list of supported attribute names from the given object'
628  ret = []
629  for attr in subj.__dict__.keys():
630  if attr[0] == '_':
631  continue
632  ret.append(attr)
633  return ret
634 
635  @staticmethod
636  def safe_attrs(subj):
637  'Return a dictionary of supported attributes from the given object'
638  ret = {}
639  for attr in Store._safe_attr_names(subj):
640  ret[attr] = subj.__dict__[attr]
641  return ret
642 
    @staticmethod
    def _proper_attr_value(obj, attr):
        '''Return the value of obj.attr after checking it has a type that
        neo4j.Node properties accept; raises ValueError otherwise.
        (unicode/long are Python 2 types - this module predates Python 3.)'''
        value = getattr(obj, attr)
        if isinstance(value, (str, unicode, float, int, long, list, tuple)):
            return value
        else:
            print >> sys.stderr, ("Attr %s of object %s of type %s isn't acceptable"
                                  % (attr, obj, type(value)))
            raise ValueError("Attr %s of object %s of type %s isn't acceptable"
                             % (attr, obj, type(value)))
654 
    @staticmethod
    def mark_dirty(objself, attr):
        '''Explicitly mark the given attribute as dirty in our store so it is
        written out on the next commit.  A silent no-op for objects not
        registered with a Store (no mangled __store_dirty_attrs attribute).'''
        if hasattr(objself, '_Store__store_dirty_attrs'):
            objself.__store_dirty_attrs[attr] = True
            # Put the owner back on the pending-client list
            objself.__store.clients[objself] = True
661 
    @staticmethod
    def _storesetattr(objself, name, value):
        '''
        Does a setattr() - and marks changed attributes "dirty".  This
        permits us to know when attributes change, and automatically
        include them in the next transaction.
        This is a GoodThing.
        Installed as __setattr__ on every registered class (see _register).
        '''
        if name[0] != '_':
            # Only non-underscore attributes are replicated to the database
            if hasattr(objself, '_Store__store_dirty_attrs'):
                try:
                    if getattr(objself, name) == value:
                        # No actual change - don't mark anything dirty
                        return
                except AttributeError:
                    # Attribute doesn't exist yet - treat as a change
                    pass
                if objself.__store.readonly:
                    print >> sys.stderr, ('Caught %s being set to %s!' % (name, value))
                    raise RuntimeError('Attempt to set attribute %s using a read-only store' % name)
                if hasattr(value, '__iter__') and len(value) == 0:
                    # Neo4j cannot store empty arrays as property values
                    raise ValueError(
                    'Attempt to set attribute %s to empty array (Neo4j limitation)' % name)
                objself.__store_dirty_attrs[name] = True
                objself.__store.clients[objself] = True
        # Bypass ourselves to perform the actual assignment
        object.__setattr__(objself, name, value)
689 
690  @staticmethod
691  def _update_node_from_obj(subj):
692  'Update the node from its paired object'
693  node = subj.__store_node
694  attrlist = subj.__store_dirty_attrs.keys()
695  for attr in attrlist:
696  node[attr] = Store._proper_attr_value(subj, attr)
697  #print >> sys.stderr, ('SETTING node["%s"] to %s' %
698  # (attr, Store._proper_attr_value(subj, attr)))
699  subj.__store_dirty_attrs = {}
700 
    def _update_obj_from_node(self, subj):
        'Update an object from its paired node - preserving "dirty" attributes'
        node = subj.__store_node
        nodeprops = node.get_properties()
        # NOTE(review): remove_subj starts True only when subj is *not* in
        # self.clients, yet the 'del' below requires subj to be present -
        # making that del appear unreachable.  Possibly the initial test was
        # meant to be 'subj in self.clients'.  Confirm before changing.
        remove_subj = subj not in self.clients
        for attr in nodeprops.keys():
            pattr = nodeprops[attr]
            if attr in subj.__store_dirty_attrs:
                # A locally-modified attribute wins over the database value
                remove_subj = False
                continue
            # Avoid getting it marked as dirty...
            object.__setattr__(subj, attr, pattr)
        if remove_subj and subj in self.clients:
            del self.clients[subj]

        # Make sure everything in the object is in the Node...
        for attr in Store._safe_attr_names(subj):
            if attr not in nodeprops:
                # Attribute exists only on the object - mark it dirty so the
                # next commit writes it to the node
                subj.__store_dirty_attrs[attr] = True
                self.clients[subj] = True
722 
723  def reset_stats(self):
724  'Reset all our statistical counters and timers'
725  self.stats = {}
726  for statname in ('nodecreate', 'relate', 'separate', 'index', 'attrupdate'
727  , 'index', 'nodedelete', 'addlabels'):
728  self.stats[statname] = 0
729  self.stats['lastcommit'] = None
730  self.stats['totaltime'] = timedelta()
731 
732  def _bump_stat(self, statname, increment=1):
733  'Increment the given statistic by the given increment - default increment is 1'
734  self.stats[statname] += increment
735 
    def _get_idx_key_value(self, cls, attrdict, subj=None):
        '''Return the (index, key, value) triple for an object of the given
        class, per the classkeymap entry:
        - 'kattr'/'vattr' name an attribute supplying the key/value, taken
          from subj when present, else from attrdict (KeyError if in neither);
        - 'key'/'value' supply a constant.
        Note: subj may also be a plain dict (see load()), in which case
        hasattr() is False and the attrdict path is used.'''
        kmap = self.classkeymap[cls.__name__]
        if 'kattr' in kmap:
            kk = kmap['kattr']
            if hasattr(subj, kk):
                key = getattr(subj, kk)
            else:
                key = attrdict[kk]
        else:
            key = kmap['key']

        if 'vattr' in kmap:
            kv = kmap['vattr']
            if hasattr(subj, kv):
                value = getattr(subj, kv)
            else:
                value = attrdict[kv]
        else:
            value = kmap['value']
        return (self.classkeymap[cls.__name__]['index'], key, value)
764 
765 
766  def _localsearch(self, cls, idxkey, idxvalue):
767  '''Search the 'client' array and the weaknoderefs to see if we can find
768  the requested object before going to the database'''
769 
770  classname = cls.__name__
771  kmap = self.classkeymap[classname]
772  searchlist = {}
773  if 'kattr' in kmap:
774  searchlist[kmap['kattr']] = idxkey
775  if 'vattr' in kmap:
776  searchlist[kmap['vattr']] = idxvalue
777 
778 
779  searchset = self.clients.keys()
780  for weakclient in self.weaknoderefs.values():
781  client = weakclient()
782  if client is not None and client not in self.clients:
783  searchset.append(client)
784 
785  for client in searchset:
786  if client.__class__ != cls:
787  continue
788  found = True
789  for attr in searchlist.keys():
790  if not hasattr(client, attr) or getattr(client, attr) != searchlist[attr]:
791  found = False
792  break
793  if found:
794  assert hasattr(client, '_Store__store_node')
795  return client
796  return None
797 
798  def _construct_obj_from_node(self, node, cls, clsargs=None):
799  'Construct an object associated with the given node'
800  clsargs = [] if clsargs is None else clsargs
801  # Do we already have a copy of an object that goes with this node somewhere?
802  # If so, we need to update and return it instead of creating a new object
803  nodeid = node._id
804  if nodeid in self.weaknoderefs:
805  subj = self.weaknoderefs[nodeid]()
806  if subj is None:
807  del self.weaknoderefs[nodeid]
808  else:
809  # Yes, we have a copy laying around somewhere - update it...
810  #print >> sys.stderr, ('WE HAVE NODE LAYING AROUND...', node.get_properties())
811  self._update_obj_from_node(subj)
812  return subj
813  #print >> sys.stderr, 'NODE ID: %d, node = %s' % (node._id, str(node))
814  retobj = Store.callconstructor(cls, node.get_properties())
815  for attr in clsargs:
816  if not hasattr(retobj, attr) or getattr(retobj, attr) is None:
817  # None isn't a legal value for Neo4j to store in the database
818  setattr(retobj, attr, clsargs[attr])
819  return self._register(retobj, node=node)
820 
    def _register(self, subj, node=None, index=None, unique=None, key=None, value=None):
        '''Register this object with a Node, so we can track it for updates, etc.
        Also hooks the object's class so attribute changes are noticed (see
        _storesetattr), records a weak reference keyed by node id, and fires
        an AssimEvent for newly-created (abstract) nodes.  Returns subj.'''
        if not isinstance(subj, object):
            raise(ValueError('Instances registered with Store class must be subclasses of object'))
        assert not hasattr(subj, '_Store__store')
        assert subj not in self.clients
        self.clients[subj] = True
        # Bookkeeping attributes - all name-mangled to '_Store__...' and thus
        # excluded from the node properties (underscore prefix).
        subj.__store = self
        subj.__store_node = node
        subj.__store_batchindex = None
        subj.__store_index = index
        subj.__store_index_key = key
        subj.__store_index_value = value
        subj.__store_index_unique = unique
        subj.__store_dirty_attrs = {}
        if subj.__class__ not in self.classes:
            # Hook this class's __setattr__ (once per class) so we see changes
            subj.__class__.__setattr__ = Store._storesetattr
            self.classes[subj.__class__] = True
        if node is not None and node.bound:
            # Maintain at most one live object per database node
            if node._id in self.weaknoderefs:
                weakling = self.weaknoderefs[node._id]()
                if weakling is None:
                    del self.weaknoderefs[node._id]
                else:
                    print >> sys.stderr, ('OOPS! - already here... self.weaknoderefs'
                                          , weakling, weakling.__dict__)
            assert node._id not in self.weaknoderefs or self.weaknoderefs[node._id] is None
            self.weaknoderefs[node._id] = weakref.ref(subj)
        if node is not None:
            if 'post_db_init' in dir(subj):
                # Give the object a chance to finish initializing from the db
                subj.post_db_init()
            if not node.bound:
                # Create an event to commemorate the creation of the new database object
                if AssimEvent.event_observation_enabled:
                    AssimEvent(subj, AssimEvent.CREATEOBJ)

        return subj
859 
860  def _new_nodes(self):
861  'Return the set of newly created nodes for this transaction'
862  ret = []
863  for client in self.clients:
864  if Store.is_abstract(client) and hasattr(client, '_Store__store_node'):
865  node = client.__store_node
866  ret.append((client, node))
867  return ret
868 
869 
870  @staticmethod
871  def node(subj):
872  'Returns the neo4j.Node associated with the given object'
873  return subj.__store_node
874 
875  #
876  # Except for commit() and abort(), all member functions from here on
877  # construct the batch job from previous requests
878  #
879 
880  def _batch_construct_create_nodes(self):
881  'Construct batch commands for all the new objects in this batch'
882  for pair in self._new_nodes():
883  (subj, node) = pair
884  Store._update_node_from_obj(subj)
885  subj.__store_batchindex = self.batchindex
886  if Store.debug:
887  print >> sys.stderr, ('====== Performing batch.create(%d: %s) - for new node'
888  % (self.batchindex, str(node)))
889  self.batchindex += 1
890  self._bump_stat('nodecreate')
891  self.batch.create(node)
892 
893  def _batch_construct_add_labels(self):
894  'Construct batch commands for all the labels to be added for this batch'
895  for pair in self._new_nodes():
896  (subj, node) = pair
897  self.batchindex += 1
898  cls = subj.__class__
899  if False and hasattr(cls, '__meta_labels__'):
900  print >> sys.stderr, 'ADDING LABELS for', type(subj), cls.__meta_labels__()
901  self._bump_stat('addlabels')
902  self.batch.add_labels(node, cls.__meta_labels__())
903 
    def _batch_construct_relate_nodes(self):
        '''Construct the batch commands to create the requested relationships.

        For endpoints that are not yet in the database (unbound nodes), the
        py2neo batch protocol lets us refer to them by their position in this
        same batch job - so we substitute the batch index recorded by
        _batch_construct_create_nodes() for the node itself.
        '''
        for rel in self.newrels:
            fromobj = rel['from']
            toobj = rel['to']
            fromnode = fromobj.__store_node
            tonode = toobj.__store_node
            reltype = rel['type']
            props = rel['props']
            # Unbound endpoint => refer to it by its batch index instead
            if not fromnode.bound:
                fromnode = fromobj.__store_batchindex
            if not tonode.bound:
                tonode = toobj.__store_batchindex
            # Build an abstract relationship, with properties if we have any
            if props is None:
                absrel = neo4j.Relationship(fromnode, reltype, tonode)
            else:
                absrel = neo4j.Relationship(fromnode, reltype, tonode, **props)
            # Record where this relationship will show up in batch output
            # No harm in remembering this until transaction end...
            rel['seqno'] = self.batchindex
            rel['abstract'] = absrel
            self.batchindex += 1
            if Store.debug:
                print >> sys.stderr, ('Performing batch.create(%s): node relationships'
                %       absrel)
            self._bump_stat('relate')
            if Store.debug:
                print >> sys.stderr, ('ADDING rel %s' % absrel)
            self.batch.create(absrel)
933 
934  def _batch_construct_deletions(self):
935  'Construct batch commands for removing relationships or nodes'
936  delrels = {}
937  delnodes = {}
938  for relorobj in self.deletions:
939  if isinstance(relorobj, neo4j.Relationship):
940  relid = relorobj._id
941  if relid not in delrels:
942  if Store.debug and Store.log:
943  Store.log.debug('DELETING rel %d: %s' % (relorobj._id, relorobj))
944  self._bump_stat('separate')
945  self.batch.delete(relorobj)
946  delrels[relid] = True
947  else:
948  # Then it must be a node-related object...
949  if Store.debug and Store.log:
950  Store.log.debug('DELETING NODE %s: %s' %
951  (str(relorobj.__dict__.keys()), relorobj))
952  node = relorobj.__store_node
953  nodeid = node._id
954  if nodeid in delnodes:
955  continue
956  if nodeid in self.weaknoderefs:
957  del self.weaknoderefs[nodeid]
958  # disconnect it from the database
959  for attr in relorobj.__dict__.keys():
960  if attr.startswith('_Store__store'):
961  delattr(relorobj, attr)
962  if Store.debug and Store.log:
963  Store.log.debug('DELETING node %s' % node)
964  self._bump_stat('nodedelete')
965  self.batch.delete(node)
966  delnodes[relid] = True
967 
968 
    def _batch_construct_new_index_entries(self):
        '''Construct batch commands for adding newly created nodes to the indexes.

        Only nodes registered with an index name get entries.  New nodes are
        referenced by their batch index (they have no node id yet).
        '''
        for pair in self._new_nodes():
            subj = pair[0]
            if subj.__store_index is not None:
                idx, key, value = self._compute_batch_index(subj)
                if subj.__store_index_unique:
                    if Store.debug:
                        print >> sys.stderr,('add_to_index[_or_fail]: node %s; index %s("%s","%s")'
                        %       (subj.__store_batchindex, idx, key, value))
                    # Collapse the (major, minor, patch) Neo4j version into a
                    # single comparable integer, e.g. (2,1,0) -> 210
                    vers = ( int(self.db.neo4j_version[0])*100
                    +        int(self.db.neo4j_version[1])*10
                    +        int(self.db.neo4j_version[2]))
                    if vers >= 210:
                        # Work around bug in add_to_index_or_fail()...
                        # (loses the uniqueness guarantee on >= 2.1.0)
                        self.batch.add_to_index(neo4j.Node, idx, key, value
                        ,       subj.__store_batchindex)
                    else:
                        self.batch.add_to_index_or_fail(neo4j.Node, idx, key, value
                        ,       subj.__store_batchindex)

                else:
                    # Non-unique index entry - plain add is always fine
                    if Store.debug:
                        print >> sys.stderr, ('add_to_index: node %s added to index %s(%s,%s)' %
                                (subj.__store_batchindex, idx, key, value))
                    self.batch.add_to_index(neo4j.Node, idx, key, value
                    ,       subj.__store_batchindex)
996 
997  def _compute_batch_index(self, subj):
998  '''
999  Compute index information for a new node
1000  '''
1001  idx = self.db.legacy.get_index(neo4j.Node, subj.__store_index)
1002  key = subj.__store_index_key
1003  value = subj.__store_index_value
1004  self.index_entry_count += 1
1005  self._bump_stat('index')
1006  return (idx, key, value)
1007 
1008  def _batch_construct_node_updates(self):
1009  'Construct batch commands for updating attributes on "old" nodes'
1010  clientset = {}
1011  for subj in self.clients:
1012  assert subj not in clientset
1013  clientset[subj] = True
1014  node = subj.__store_node
1015  if not node.bound:
1016  continue
1017  for attr in subj.__store_dirty_attrs.keys():
1018  # Each of these items will return None in the HTTP stream...
1019  self.node_update_count += 1
1020  self._bump_stat('attrupdate')
1021  setattr(node, attr, Store._proper_attr_value(subj, attr))
1022  if Store.debug:
1023  print >> sys.stderr, ('Setting property %s of node %d to %s' % (attr
1024  , node._id, Store._proper_attr_value(subj, attr)))
1025  if Store.log:
1026  Store.log.debug('Setting property %s of %d to %s' % (attr
1027  , node._id, Store._proper_attr_value(subj, attr)))
1028  self.batch.set_property(node, attr, Store._proper_attr_value(subj, attr))
1029 
1030  def abort(self):
1031  'Clear out any currently pending transaction work - start fresh'
1032  if self.batch is not None:
1033  self.batch = None
1034  self.batchindex = 0
1035  for subj in self.clients:
1036  subj.__store_dirty_attrs = {}
1037  self.clients = {}
1038  self.newrels = []
1039  self.deletions = []
1040  # Clean out dead node references
1041  for nodeid in self.weaknoderefs.keys():
1042  subj = self.weaknoderefs[nodeid]()
1043  if subj is None or not hasattr(subj, '_Store__store_node'):
1044  del self.weaknoderefs[nodeid]
1045 
    def commit(self):
        '''Commit all the changes we've created since our last transaction.

        Builds up a single py2neo batch job (creations, relationships, index
        entries, property updates, labels, deletions - order matters: node
        creations must come first so later steps can reference nodes by batch
        index), submits it, then rebinds every formerly-abstract node to the
        concrete node the database returned.  Ends with abort() to reset the
        transaction state.  Returns the raw batch submission results.
        Raises GraphError if the batch submission fails.
        '''
        if Store.debug:
            print >> sys.stderr, ('COMMITTING THIS THING:', str(self))
        # Create the batch job lazily if nothing has created it yet
        self.batch = self.batch if self.batch is not None \
                        else py2neo.legacy.LegacyWriteBatch(self.db)
        self.batchindex = 0
        self._batch_construct_create_nodes()      # These return new nodes in batch return result
        self._batch_construct_relate_nodes()      # These return new relationships
        self._batch_construct_new_index_entries() # These return the objects indexed
        self._batch_construct_node_updates()      # These return None
        self._batch_construct_add_labels()        # Not sure what these return
        self._batch_construct_deletions()         # These return None
        if Store.debug:
            print >> sys.stderr, ('Batch Updates constructed: Committing THIS THING:', str(self))
        if Store.log:
            Store.log.debug('Batch Updates constructed: Committing THIS THING: %s'
            %   str(self))
        # Time the actual submission for our statistics
        start = datetime.now()
        try:
            submit_results = self.batch.submit()
        except GraphError as e:
            print >> sys.stderr, ('FAILED TO COMMIT THIS THING:', str(self))
            print >> sys.stderr, self
            print >> sys.stderr, ('BatchError: %s' % e)
            raise e
        if Store.debug:
            print >> sys.stderr, 'SUBMIT RESULTS FOLLOW:'
            for result in submit_results:
                print >> sys.stderr, 'SUBMITRESULT:', type(result), result
        end = datetime.now()
        diff = end - start
        self.stats['lastcommit'] = diff
        self.stats['totaltime'] += diff

        # Save away (update) any newly created nodes...
        for pair in self._new_nodes():
            # unused variable
            # pylint: disable=W0612
            (subj, unused) = pair
            # The batch result at this object's recorded position is its
            # freshly-created concrete node
            index = subj.__store_batchindex
            newnode = submit_results[index]
            if Store.debug:
                print >> sys.stderr, 'LOOKING at new node with batch index %d' % index
                print >> sys.stderr, 'NEW NODE looks like %s' % str(newnode)
                print >> sys.stderr, 'SUBJ (our copy) looks like %s' % str(subj)
                print >> sys.stderr, ('NEONODE (their copy) looks like %d, %s'
                %       (newnode._id, str(newnode.get_properties())))
            # This 'subj' used to have an abstract node, now it's concrete
            subj.__store_node = newnode
            self.weaknoderefs[newnode._id] = weakref.ref(subj)
            # Sanity check: our object's attributes should match what the
            # database says it stored (complaints only - not fatal)
            for attr in newnode.get_properties():
                if not hasattr(subj, attr):
                    print >> sys.stderr, ("OOPS - we're missing attribute %s" % attr)
                elif getattr(subj, attr) != newnode[attr]:
                    print >> sys.stderr, ("OOPS - attribute %s is %s and should be %s" \
                    %       (attr, getattr(subj, attr), newnode[attr]))
        #self.dump_clients()
        # Reset all transaction state now that everything is in the database
        self.abort()
        if Store.debug:
            print >> sys.stderr, 'DB TRANSACTION COMPLETED SUCCESSFULLY'
        return submit_results
1108 
1109  def clean_store(self):
1110  '''Clean out all the objects we used to have in our store - afterwards we
1111  have none associated with this Store'''
1112  for nodeid in self.weaknoderefs:
1113  obj = self.weaknoderefs[nodeid]()
1114  if obj is not None:
1115  for attr in obj.__dict__.keys():
1116  if attr.startswith('_Store__store'):
1117  delattr(obj, attr)
1118  self.weaknoderefs = {}
1119  self.abort()
1120 
if __name__ == "__main__":
    #pylint: disable=C0413
    from cmadb import Neo4jCreds
    # I'm not too concerned about this test code...
    # R0914:923,4:testme: Too many local variables (17/15)
    # pylint: disable=R0914
    def testme():
        '''A little test code...

        NOTE: this is a live smoke test - it requires a running Neo4j server
        and WIPES THE DATABASE it connects to before exercising the Store.
        '''

        # Must be a subclass of 'object'...
        # pylint: disable=R0903
        class Drone(object):
            'This is a Class docstring'
            def __init__(self, a=None, b=None, name=None):
                'This is a doc string'
                self.a = a
                self.b = b
                self.name = name
            def foo_is_blacklisted(self):
                'This is a doc string too'
                return 'a=%s b=%s name=%s' % (self.a, self.b, self.name)

            @classmethod
            def __meta_labels__(cls):
                # Labels to attach to nodes of this class at creation time
                return ['Class_%s' % cls.__name__]

        Neo4jCreds().authenticate()
        ourdb = neo4j.Graph()
        ourdb.legacy.get_or_create_index(neo4j.Node, 'Drone')
        dbvers = ourdb.neo4j_version
        # Clean out the database - query syntax differs between Neo4j 1.x and 2.x
        if dbvers[0] >= 2:
            qstring = 'match (n) optional match (n)-[r]-() delete n,r'
        else:
            qstring = 'start n=node(*) match n-[r?]-() delete n,r'
        ourdb.cypher.run(qstring)
        # Which fields of which types are used for indexing
        classkeymap = {
            Drone:  # this is for the Drone class
            {'index': 'Drone',  # The index name for this class is 'Drone'
             'key':   'Drone',  # The key field is a constant - 'Drone'
             'vattr': 'name'    # The value field is an attribute - 'name'
            }
        }
        # uniqueindexmap and classkeymap are optional, but make save() much more convenient

        store = Store(ourdb, uniqueindexmap={'Drone': True}, classkeymap=classkeymap)
        DRONE = 'Drone121'

        # Construct an initial Drone
        #   fred = Drone(a=1,b=2,name=DRONE)
        #   store.save(fred)  # Drone is a 'known' type, so we know which fields are index key(s)
        #
        # load_or_create() is the preferred way to create an object...
        #
        fred = store.load_or_create(Drone, a=1, b=2, name=DRONE)

        assert fred.a == 1
        assert fred.b == 2
        assert fred.name == DRONE
        assert not hasattr(fred, 'c')
        # Modify some fields -- add some...
        fred.a = 52
        fred.c = 3.14159
        assert fred.a == 52
        assert fred.b == 2
        assert fred.name == DRONE
        assert fred.c > 3.14158 and fred.c < 3.146
        # Create some relationships...
        rellist = ['ISA', 'WASA', 'WILLBEA']
        for rel in rellist:
            store.relate(fred, rel, fred)
        # These should have no effect - but let's make sure...
        for rel in rellist:
            store.relate_new(fred, rel, fred)
        store.commit()  # The updates have been captured...
        print >> sys.stderr, ('Statistics:', store.stats)

        # Commit must not disturb our local values
        assert fred.a == 52
        assert fred.b == 2
        assert fred.name == DRONE
        assert fred.c > 3.14158 and fred.c < 3.146

        #See if the relationships 'stuck'...
        for rel in rellist:
            ret = store.load_related(fred, rel, Drone)
            ret = [elem for elem in ret]
            assert len(ret) == 1 and ret[0] is fred
        for rel in rellist:
            ret = store.load_in_related(fred, rel, Drone)
            ret = [elem for elem in ret]
            assert len(ret) == 1 and ret[0] is fred
        assert fred.a == 52
        assert fred.b == 2
        assert fred.name == DRONE
        assert fred.c > 3.14158 and fred.c < 3.146
        print >> sys.stderr, (store)
        assert not store.transaction_pending

        #Add another new field
        fred.x = 'malcolm'
        store.dump_clients()
        print >> sys.stderr, ('store:', store)
        assert store.transaction_pending
        store.commit()
        print >> sys.stderr, ('Statistics:', store.stats)
        assert not store.transaction_pending
        assert fred.a == 52
        assert fred.b == 2
        assert fred.name == DRONE
        assert fred.c > 3.14158 and fred.c < 3.146
        assert fred.x == 'malcolm'

        # Check out load_indexed...
        newnode = store.load_indexed('Drone', 'Drone', fred.name, Drone)[0]
        print >> sys.stderr, ('LoadIndexed NewNode: %s %s' % (newnode, store.safe_attrs(newnode)))
        # It's dangerous to have two separate objects which are the same thing be distinct
        # so we if we fetch a node, and one we already have, we get the original one...
        assert fred is newnode
        if store.transaction_pending:
            print >> sys.stderr, ('UhOh, we have a transaction pending.')
            store.dump_clients()
        assert not store.transaction_pending
        assert newnode.a == 52
        assert newnode.b == 2
        assert newnode.x == 'malcolm'
        store.separate(fred, 'WILLBEA')
        assert store.transaction_pending
        store.commit()
        print >> sys.stderr, ('Statistics:', store.stats)

        # Test a simple cypher query...
        qstr = "START d=node:Drone('*:*') RETURN d"
        qnode = store.load_cypher_node(qstr, Drone)  # Returns a single node
        print >> sys.stderr, ('qnode=%s' % qnode)
        assert qnode is fred
        qnodes = store.load_cypher_nodes(qstr, Drone)  # Returns iterable
        qnodes = [qnode for qnode in qnodes]
        assert len(qnodes) == 1
        assert qnode is fred

        # See if the now-separated relationship went away...
        rels = store.load_related(fred, 'WILLBEA', Drone)
        rels = [rel for rel in rels]
        assert len(rels) == 0
        store.refresh(fred)
        store.delete(fred)
        assert store.transaction_pending
        store.commit()

        # When we delete an object from the database, the python object
        # is disconnected from the database...
        assert not hasattr(fred, '_Store__store_node')
        assert not store.transaction_pending

        print >> sys.stderr, ('Statistics:', store.stats)
        print >> sys.stderr, ('Final returned values look good!')


    Store.debug = False
    testme()
def node(subj)
Definition: store.py:871
def load_cypher_query(self, querystr, clsfact, params=None, maxcount=None)
Definition: store.py:525
def is_uniqueindex(self, index_name)
Definition: store.py:228
def save_indexed(self, index_name, key, value, subj)
Definition: store.py:263
def abort(self)
Definition: store.py:1030
def _batch_construct_node_updates(self)
Definition: store.py:1008
def _update_obj_from_node(self, subj)
Definition: store.py:701
def _batch_construct_add_labels(self)
Definition: store.py:893
def load_or_create(self, cls, clsargs)
Definition: store.py:386
def save_unique(self, index_name, key, value, subj)
Definition: store.py:273
def _batch_construct_create_nodes(self)
Definition: store.py:880
def relate_new(self, subj, rel_type, obj, properties=None)
Definition: store.py:409
def id(subj)
Definition: store.py:200
def separate_in(self, subj, rel_type=None, obj=None)
Definition: store.py:444
def relate(self, subj, rel_type, obj, properties=None)
Definition: store.py:396
def load_related(self, subj, rel_type, cls)
Definition: store.py:459
def load_cypher_node(self, query, cls, params=None)
Definition: store.py:517
def load_in_related(self, subj, rel_type, cls)
Definition: store.py:473
def _batch_construct_relate_nodes(self)
Definition: store.py:904
def delete(self, subj)
Definition: store.py:314
def load(self, cls, clsargs)
Definition: store.py:354
def _construct_obj_from_node(self, node, cls, clsargs=None)
Definition: store.py:798
def _batch_construct_new_index_entries(self)
Definition: store.py:969
def bound(subj)
Definition: store.py:224
def _register(self, subj, node=None, index=None, unique=None, key=None, value=None)
Definition: store.py:821
def commit(self)
Definition: store.py:1046
def _batch_construct_deletions(self)
Definition: store.py:934
ResourceCmd *(* constructor)(guint structsize, ConfigContext *request, gpointer user_data, ResourceCmdCallback callback)
Definition: resourcecmd.c:46
def testme()
Definition: store.py:1127
def _localsearch(self, cls, idxkey, idxvalue)
Definition: store.py:766
def dump_clients(self)
Definition: store.py:235
def _get_idx_key_value(self, cls, attrdict, subj=None)
Definition: store.py:736
def load_cypher_nodes(self, querystr, cls, params=None, maxcount=None, debug=False)
Definition: store.py:484
def getstore(subj)
Definition: store.py:212
def separate(self, subj, rel_type=None, obj=None)
Definition: store.py:425
def save(self, subj, node=None)
Definition: store.py:279
def _new_nodes(self)
Definition: store.py:860
def refresh(self, subj)
Definition: store.py:327
def mark_dirty(objself, attr)
Definition: store.py:656
def has_node(subj)
Definition: store.py:207
def fmt_dirty_attrs(self)
Definition: store.py:245
def load_indexed(self, index_name, key, value, cls)
Definition: store.py:333
def __str__(self)
Definition: store.py:169
def __init__(self, db, uniqueindexmap=None, classkeymap=None, readonly=False)
Definition: store.py:121
def lucene_escape(query)
Definition: store.py:195
def is_abstract(subj)
Definition: store.py:217
def _yielded_value(self, value, clsfact)
Definition: store.py:549
def constructobj(self, constructor, node)
Definition: store.py:608
def reset_stats(self)
Definition: store.py:723
def clean_store(self)
Definition: store.py:1109
def transaction_pending(self)
Definition: store.py:570
def _compute_batch_index(self, subj)
Definition: store.py:997
def _bump_stat(self, statname, increment=1)
Definition: store.py:732
def safe_attrs(subj)
Definition: store.py:636
def callconstructor(constructor, kwargs)
Definition: store.py:575