How to link foreign keys & primary keys using Python?
sonal
sonaldgr8 at gmail.com
Tue Jun 13 02:29:19 EDT 2006
Hi Mr. Steve,
The *indexes* I am using are lists...
The code for the creation of PartitionedPK is given below...
*******************************************************************************
import sha
import pickle

# PARTITION_SUFFIX and field_offsets() are defined elsewhere (not shown here).

class PartitionedPK(object):
    def __init__(self, name, save_to, fields):
        self.name = name
        self.idx_name = save_to
        self.part_name = save_to + PARTITION_SUFFIX
        self.fields = fields
        self.digester = sha.new

    def setup(self, schema):
        self.partitions = [[] for i in range(256)]
        self.flush_pos = [[] for i in range(256)]
        self.flush_count = 0
        self.index = {}
        self.offsets = field_offsets(self.fields, schema)
        if not self.offsets:
            raise ValueError('One or more index field names are invalid')
        self.idx_file = open(self.idx_name, 'wb+')
        self.part_file = open(self.part_name, 'wb+')

    def save(self):
        pickle.dump(self.flush_count, self.part_file, -1)
        pickle.dump(self.flush_pos, self.part_file, -1)
        self.idx_file.close()
        self.part_file.close()

    def flush(self):
        # dump each bin's pending (key, record) pairs to disk and remember
        # where that chunk starts so finalize() can seek back to it
        self.flush_count += 1
        for i in range(256):
            self.flush_pos[i].append(self.idx_file.tell())
            pickle.dump(self.partitions[i], self.idx_file, -1)
            self.partitions[i] = []

    def valid(self, record, data):
        # hash the key fields and drop the digest into one of 256 bins
        key = self.digester(''.join([data[i] for i in self.offsets])).digest()
        self.partitions[ord(key[0])].append((key, record))
        # defer checking till later
        return True

    def finalize(self):
        # re-read every bin and collect the records whose key has already
        # been seen, i.e. duplicate primary keys
        self.flush()
        errors = []
        for bin in range(256):
            #show('Checking %s, bin %d/256 ... ' % (self.name, bin))
            seen = {}
            has = seen.has_key
            for flush in range(self.flush_count):
                self.idx_file.seek(self.flush_pos[bin][flush])
                records = pickle.load(self.idx_file)
                for key, value in records:
                    if has(key):
                        errors.append(value)
                    else:
                        seen[key] = value
        return errors
*******************************************************************************
The PK definition is as follows:
vol_pk = PartitionedPK(name='VOL_PK',
                       save_to='../Index/vol.idx',
                       fields=['ID', 'Type', 'Curr_Code', 'Tenor'])
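For reference, as far as I can tell from flush() above, 'vol.idx' ends up as a
sequence of pickled lists of (digest, record) pairs, one list per bin per
flush. A throwaway sketch to read it back (same path as in vol_pk):
*******************************************************************************
import pickle

# Throwaway check of what the index file written by PartitionedPK holds
# (path as in the vol_pk definition above).
f = open('../Index/vol.idx', 'rb')
chunks = []
while True:
    try:
        chunks.append(pickle.load(f))   # each load returns one bin's list
    except EOFError:
        break
f.close()

print len(chunks), 'pickled objects'
if chunks:
    print type(chunks[0])               # a list of (digest, record) pairs
*******************************************************************************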
The code for the Foreign Key declaration (referencing the indexes) is given below...
*******************************************************************************
class HashedFK(object):
    # uses the same sha, pickle and field_offsets() as PartitionedPK above
    def __init__(self, name, load_from, fields):
        self.name = name
        self.filename = load_from
        self.fields = fields
        self.digester = sha.new

    def setup(self, schema):
        self.index = {}
        self.offsets = field_offsets(self.fields, schema)
        if not self.offsets:
            raise ValueError('One or more index field names are invalid')
        file = open(self.filename, 'rb+')
        self.index = pickle.load(file)
        file.close()

    def valid(self, record, fields):
        # hash the declared fields and look the digest up in the loaded index
        key = self.digester(''.join([fields[i] for i in self.offsets])).digest()
        return self.index.has_key(key)

    def flush(self):
        pass

    def finalize(self):
        return None
*******************************************************************************
The FK definition is as follows:
vol_fk = HashedFK(name='VOL_FK',
                  load_from='../Index/vol.idx',
                  fields=['ID', 'Type', 'Curr_Code', 'Tenor'])
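In other words, as I understand valid() above, the FK check just digests the
declared fields of each record and looks the digest up in the loaded index. A
toy version of that matching (all values invented, and a hand-built dict
standing in for the loaded index):
*******************************************************************************
import sha

# Toy version of HashedFK.valid(): both the PK side and the FK side digest
# the joined field values, so a lookup can only succeed when the same
# fields, in the same order, were used on both sides.  Values are invented.
record = {'ID': '42', 'Type': 'FIXED', 'Curr_Code': 'INR', 'Tenor': '6M'}
key_fields = ['ID', 'Type', 'Curr_Code', 'Tenor']

pk_key = sha.new(''.join([record[f] for f in key_fields])).digest()
index = {pk_key: 1}                     # stands in for the loaded index

fk_key = sha.new(''.join([record[f] for f in key_fields])).digest()
print index.has_key(fk_key)             # True when the field lists match
*******************************************************************************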
The code works fine when the foreign key references the complete primary key.
But if the FK were to be on only 'ID', the FK definition would have been:
vol_fk = HashedFK(name='VOL_FK',
                  load_from='../Index/vol.idx',
                  fields=['ID'])
This is where the problem lies: it shows
AttributeError: 'list' object has no attribute 'has_key'
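Just to be explicit about what that message means, here is a tiny reproduction
of it (the digest, the record number and the scratch file name 'demo.idx' are
all made up):
*******************************************************************************
import sha, pickle

# Minimal reproduction of the message: pickle a *list* of (digest, record)
# pairs, load it back, and call has_key() on the result.
pairs = [(sha.new('42').digest(), 1)]   # made-up digest and record number
f = open('demo.idx', 'wb+')             # scratch file, name is arbitrary
pickle.dump(pairs, f, -1)
f.seek(0)
index = pickle.load(f)
f.close()

print type(index)      # <type 'list'>
index.has_key('x')     # AttributeError: 'list' object has no attribute 'has_key'
*******************************************************************************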
I have also tried defining another PK with a single field, as follows:
Tvol_pk = PartitionedPK(name='TVOL_PK',
                        save_to='../Index/tvol.idx',
                        fields=['ID'])
The index 'tvol.idx' is being created at the specified path, but referencing
this index (i.e., tvol.idx) with the vol_fk given above also gives the same
error.
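A throwaway check of what HashedFK.setup() would actually load from that file
(same path as above):
*******************************************************************************
import pickle

# Peek at the first object HashedFK.setup() would get back from
# pickle.load(); if it is a list rather than a dict, has_key() in valid()
# will fail with exactly this AttributeError.
f = open('../Index/tvol.idx', 'rb')
first = pickle.load(f)
f.close()
print type(first), hasattr(first, 'has_key')
*******************************************************************************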