# end of variables dependent on EVERGREEN_SERVER
# ----------------------------------------------------------------------
- # BIB_PART_MERGE: display multiple parts for one bib title
- # that have been scanned in separately in one section
- BIB_PART_MERGE = bool(getattr(settings, 'BIB_PART_MERGE', True))
-
# OPAC_LANG and OPAC_SKIN: localization skinning for your OPAC
OPAC_LANG = getattr(settings, 'EVERGREEN_OPAC_LANG', 'en-CA')
# the given item.
RESERVES_DESK_NAME = getattr(settings, 'RESERVES_DESK_NAME', None)
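+ # BIB_PART_MERGE: display parts of one bib title that were scanned in separately
+ # as a single entry; PART_MERGE: merge entries only when their part labels match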
+ BIB_PART_MERGE = getattr(settings, 'BIB_PART_MERGE', None)
+ PART_MERGE = getattr(settings, 'PART_MERGE', None)
- # BIB_PART_MERGE: if True, merge parts under one title
- BIB_PART_MERGE = getattr(settings, 'BIB_PART_MERGE', False)
-
# USE_Z3950: if True, use Z39.50 for catalogue search; if False, use OpenSRF.
# Don't set this value directly here: rather, if there is a valid Z3950_CONFIG
# settings in local_settings.py, then Z39.50 will be used.
return objset
+ # is the current barcode in one of the identified duplicate sets?
def collect_set(barcode,bcs,ids):
bc_dups = []
id_dups = []
#syrup tries to store as little as possible about an
#item, which leads to a lot of hoops when combining
#volumes/parts
- def get_copydetails(barcode,copyids,reserves_loc,bcs,ids):
+ def get_copydetails(barcode,copyids,reserves_loc,bib_part_flag,part_flag,bcs,ids):
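+ # bib_part_flag merges every part of the bib, part_flag merges only matching part
+ # labels; the desk and library copy counts are now returned alongside the copies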
+ copies_desk = len(copyids)
+ copies_lib = copies_desk
copy_list = []
+ lib_parts_list = []
+ res_parts_list = []
+ copy_sort = None
+ part_sort = None
+ part_label = ''
bcs_set, ids_set = collect_set(barcode,bcs,ids)
for copyid in copyids:
thisloc = thisloc.get("name")
#create copy object for supplied barcode - will be all barcodes if none supplied
- if thisloc in reserves_loc and (barcode==circbarcode or circbarcode in bcs_set):
+ if part_flag:
circ_modifier = circinfo.get("circ_modifier")
circs = circinfo.get("circulations")
parts = circinfo.get("parts")
- part_label = ''
- part_sort = None
part = None
if parts:
part = parts[0]
- if part:
+ if part and (bib_part_flag or part_flag):
part_label = part.get("label")
part_sort = part.get("label_sortkey")
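+ # track which part labels sit at the reserves desk and which exist anywhere in the library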
+ if thisloc in reserves_loc:
+ res_parts_list.append(part_sort)
+ lib_parts_list.append(part_sort)
+
+ if thisloc in reserves_loc and (barcode==circbarcode or circbarcode in bcs_set):
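+ # remember the part of the copy being reported on so the counts below can be narrowed to it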
+ if part_sort is not None and copy_sort is None:
+ copy_sort = part_sort
id_ind = -1
if circbarcode in bcs_set:
id_ind = ids_set[bcs_set.index(circbarcode)]
copy_list.append(copy_obj(circ_modifier,circs,part_label,part_sort,id_ind))
- return sorted(copy_list, key=lambda copy: copy.part_sort)
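+ # count every part seen, then narrow to the matched part when merging by part label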
+ desk_count = len(res_parts_list)
+ lib_count = len(lib_parts_list)
+ if part_flag and copy_sort is not None:
+ desk_count = res_parts_list.count(copy_sort)
+ lib_count = lib_parts_list.count(copy_sort)
+
+ return sorted(copy_list, key=lambda copy: copy.part_sort),desk_count,lib_count
#deal with call numbers that have embedded parts - ugh!
def get_dueinfo(callprefix,callsuffix,callno,earliestdue,attachtest,voltest,sort_callno,
return last_call, last_vol
+ # make sure the request call number list is limited to unique entries
+ def callparts(callno,status,syrup_id,label,allcalls):
+ partcalls = allcalls
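+ # partcalls is just another name for allcalls, so appending below updates the caller's list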
+ if len(partcalls) > 0:
+ multipart = partcalls[-1]
+ if callno != multipart[0] and label != multipart[3]:
+ partcalls.append([callno,status,syrup_id,label])
+ else:
+ partcalls.append([callno,status,syrup_id,label])
+
+ return partcalls
+
#use counts from system if not parts
def get_desk_counts(counts):
desk_count = 0
callprefix,callsuffix,callno,voltest,attachtest,vol)
if version >= 2.1:
+ # uh-oh - something is inconsistent when this is called multiple times
+ #print "called", [prefix,sort_callno,suffix]
+ #print "org", org
+ #print "==>", E1(OPENSRF_CN_CALL, bib_id, [prefix,sort_callno,suffix], org)
copyids = E1(OPENSRF_CN_CALL, bib_id, [prefix,sort_callno,suffix], org)
else:
copyids = E1(OPENSRF_CN_CALL, bib_id, sort_callno, org)
#get copy information
- copies = get_copydetails(barcode,copyids,self.RESERVES_DESK_NAME,bcs,ids)
+ copies, desk, lib = get_copydetails(barcode,copyids,self.RESERVES_DESK_NAME,
+ self.BIB_PART_MERGE,self.PART_MERGE,bcs,ids)
- desk = get_desk_counts(counts)
- if barcode:
- desk = len(copies)
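+ # fall back to the system-supplied counts if the part-aware scan found no desk copies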
+ if desk==0:
+ desk = get_desk_counts(counts)
avail = desk
copy_parts = []
# we want to identify the copy that will be returned first if
# all are checked out
for copy in copies:
- #this condition should only ever be true when a multipart is in full display
- #in that case, the most available copy should be selected
- if len(ids) == 1:
- if ids[0][0] == '':
- avail = 1
if copy.part_label:
- #print "callno", callno
- #print "sort_callno", sort_callno
-
callno = sort_callno + " " + copy.part_label
if copy.part_sort in copy_parts and len(copy_parts) > 0:
#leave alone if locked - otherwise mark as ready
allcalls[len(allcalls) - 1] = [callno,READY,copy.syrup_id,copy.part_label]
else:
- allcalls.append([callno,READY,copy.syrup_id,copy.part_label])
+ allcalls = callparts(callno,READY,copy.syrup_id,copy.part_label,allcalls)
copy_parts.append(copy.part_sort)
bringfw = attachtest
if copy.circs and isinstance(copy.circs, list):
if (earliestdue is None or duetime < earliestdue):
- #print "SETTING earliest to", duetime
earliestdue = duetime
dueinfo = time.strftime(self.DUE_FORMAT,earliestdue)
#will want the link to be to the earliest item if not multipart
from collections import defaultdict
from conifer.libsystems import marcxml as MX
+from conifer.libsystems.evergreen.support import E1
from conifer.plumbing.genshi_support import get_request
from conifer.plumbing.hooksystem import *
from datetime import datetime, timedelta, date
# refer to static values in the module.
integration_class = None
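+# OpenSRF method used to fetch a fleshed copy (including part information) by barcode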
+OPENSRF_BARCODE = "open-ils.search.asset.copy.fleshed2.find_by_barcode"
if hasattr(settings, 'INTEGRATION_CLASS'):
modname, klassname = settings.INTEGRATION_CLASS.rsplit('.', 1) # e.g. 'foo.bar.baz.MyClass'
mod = __import__(modname, fromlist=[''])
integration_class = getattr(mod, klassname)
-BIB_PART_MERGE = bool(getattr(settings, 'BIB_PART_MERGE', True))
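+# BIB_PART_MERGE (off by default): merge all parts of a bib title into one entry
+# PART_MERGE (on by default): merge entries only when their part labels match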
+BIB_PART_MERGE = bool(getattr(settings, 'BIB_PART_MERGE', False))
+PART_MERGE = bool(getattr(settings, 'PART_MERGE', True))
#----------------------------------------------------------------------
return True
return False
- #collect barcodes for dups
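+ # fetch the copy record for a barcode and return its part label, if it has one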
+ def get_label(bc):
+ partlabel = None
+ copyinfo = E1(OPENSRF_BARCODE, bc)
+ parts = copyinfo.get("parts")
+ if parts:
+ part = parts[0]
+ if part:
+ partlabel = part['label']
+ return partlabel
+
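+ # do two barcodes carry the same part label? (needs an OpenSRF lookup per barcode)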
+ #def parts_match(bc1,bc2,labels):
+ def parts_match(bc1,bc2):
+ label1 = get_label(bc1)
+ if label1 is not None:
+ label2 = get_label(bc2)
+ if label1 == label2:
+ #add label to labels
+ return True
+ return False
+
+ # collect barcodes for titles that share the same bib id
def deal_with_dups(item,items,edit_status,barcodes):
dup_barcodes = []
dup_ids = []
if item.item_type == 'HEADING':
return push_thru, dup_barcodes, dup_ids
- if not BIB_PART_MERGE or edit_status:
+ if (not BIB_PART_MERGE and not PART_MERGE) or edit_status:
return push_thru, dup_barcodes, dup_ids
if not is_dup_candidate(item):
return push_thru, dup_barcodes, dup_ids
if is_dup_candidate(display_item):
if display_item.barcode != item.barcode:
if display_item.bib_id == item.bib_id and display_item.barcode not in dup_barcodes:
- dup_barcodes.append(display_item.barcode)
- dup_ids.append(display_item.id)
+ #if BIB_PART_MERGE or (PART_MERGE and parts_match(item.barcode,display_item.barcode,part_labels)):
+ if BIB_PART_MERGE or (PART_MERGE and parts_match(item.barcode,display_item.barcode)):
+ dup_barcodes.append(display_item.barcode)
+ dup_ids.append(display_item.id)
+ # if duplicates were added, make sure the original item's barcode and id are included too
if len(dup_barcodes) > 0 and not item.barcode in dup_barcodes:
dup_barcodes.append(item.barcode)
dup_ids.append(item.id)
+ # TODO: sort out the ordering based on part_labels
return push_thru, dup_barcodes, dup_ids
- # walk the tree
+ # walk the tree - if the bib or part merge flag is set, collect ids & barcodes for
+ # items with the same bib or part, and only pass one instance onwards
out = []
- out_barcodes = []
- out_ids = []
def walk(parent, accum):
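+ # duplicate barcode/id sets are now collected per walk level rather than shared globally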
+ out_barcodes = []
+ out_ids = []
here = dct.get(parent, [])
for item in here:
sub = []
walk(item, sub)
push_thru, bib_barcodes, syrup_ids = deal_with_dups(item,items,edit_status,out_barcodes)
- if len(bib_barcodes) > 0:
- out_barcodes.append(bib_barcodes)
- out_ids.append(syrup_ids)
- if push_thru:
+ if len(bib_barcodes) > 0 and bib_barcodes not in accum:
+ if bib_barcodes not in out_barcodes:
+ out_barcodes.append(bib_barcodes)
+ out_ids.append(syrup_ids)
+ if push_thru and bib_barcodes not in accum:
accum.append((item, sub, out_barcodes, out_ids))
walk(subtree, out)
return out