my $cstore = OpenSRF::AppSession->create('open-ils.cstore');
my $copy_rec = $cstore->request(
'open-ils.cstore.json_query.atomic',
- $self->mk_copy_query($rec_id, $org, $copy_depth, $copy_limit, $copy_offset, $pref_ou)
+ $self->mk_copy_query($rec_id, undef, $org, $copy_depth, $copy_limit, $copy_offset, $pref_ou)
);
# find foreign copy data
$ctx->{have_holdings_to_show} = 0;
$ctx->{have_mfhd_to_show} = 0;
- $self->get_hold_copy_summary($rec_id, $org);
+ $self->get_hold_copy_summary($rec_id, undef, $org);
$self->timelog("past get_hold_copy_summary()");
$self->ctx->{bib_is_dead} = OpenILS::Application::AppUtils->is_true(
sub mk_copy_query {
my $self = shift;
my $rec_id = shift;
+ my $mmr_id = shift;
my $org = shift;
my $depth = shift;
my $copy_limit = shift;
my $copy_offset = shift;
my $pref_ou = shift;
my $query = $U->basic_opac_copy_query(
- $rec_id, undef, undef, $copy_limit, $copy_offset, $self->ctx->{is_staff}
+ $rec_id, $mmr_id, undef, undef,
+ $copy_limit, $copy_offset, $self->ctx->{is_staff}
);
if($org != $self->ctx->{aou_tree}->()->id) {
}
sub get_hold_copy_summary {
- my ($self, $rec_id, $org) = @_;
+ my ($self, $rec_id, $mmr_id, $org) = @_;
my $ctx = $self->ctx;
+
+ # in metarecord mode, the copy and hold count calls below take the MR id
+ $rec_id = $mmr_id if $mmr_id;
my $search = OpenSRF::AppSession->create('open-ils.search');
- my $copy_count_meth = 'open-ils.search.biblio.record.copy_count';
+ my $copy_count_meth = $mmr_id ?
+ 'open-ils.search.biblio.metarecord.copy_count' :
+ 'open-ils.search.biblio.record.copy_count';
+
# We want to include OPAC-invisible copies in a staff context
if ($ctx->{is_staff}) {
$copy_count_meth .= '.staff';
$org = $ctx->{get_aou}->($org)->parent_ou;
}
+ my $hmeth = $mmr_id ?
+ 'open-ils.circ.mmr.holds.count' :
+ 'open-ils.circ.bre.holds.count';
+
$self->ctx->{record_hold_count} = $U->simplereq(
- 'open-ils.circ', 'open-ils.circ.bre.holds.count',
- $rec_id, $count_args);
+ 'open-ils.circ', $hmeth, $rec_id, $count_args);
$self->ctx->{copy_summary} = $req1->recv->content;
)[0];
}
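+ # Page handler for the metarecord summary view. Mirrors load_record(),
+ # but drives the metarecord-aware paths added above: copy data and
+ # hold/copy counts are fetched by MR id, while per-bib data (added
+ # content, MARC HTML, the deleted flag) uses the MR's master record.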
+sub load_metarecord {
+ my $self = shift;
+ my %kwargs = @_;
+ my $ctx = $self->ctx;
+ $ctx->{page} = 'metarecord';
+ my $e = OpenILS::Utils::CStoreEditor->new;
+
+ $self->timelog("load_metarecord() began");
+
+ my $mmr_id = $ctx->{page_args}->[0];
+ return Apache2::Const::HTTP_BAD_REQUEST
+ unless $mmr_id and $mmr_id =~ /^\d+$/;
+
+ my $mmr = $e->retrieve_metabib_metarecord($mmr_id)
+ or return Apache2::Const::HTTP_BAD_REQUEST; # bad ID
+
+ # some data can be fetched directly via the master record
+ my $rec_id = $mmr->master_record;
+
+ $self->added_content_stage1($rec_id);
+ $self->timelog("past added content stage 1");
+
+ my $org = $self->_get_search_lib();
+ my $org_name = $ctx->{get_aou}->($org)->shortname;
+ my $pref_ou = $self->_get_pref_lib();
+ my $depth = $self->cgi->param('depth');
+ $depth = $ctx->{get_aou}->($org)->ou_type->depth
+ unless defined $depth; # can be 0
+
+ my $copy_depth = $self->cgi->param('copy_depth');
+ $copy_depth = $depth unless defined $copy_depth; # can be 0
+ $self->ctx->{copy_depth} = $copy_depth;
+
+ my $copy_limit = int($self->cgi->param('copy_limit') || 10);
+ my $copy_offset = int($self->cgi->param('copy_offset') || 0);
+
+ $self->get_staff_search_settings;
+ if ($ctx->{staff_saved_search_size}) {
+ $ctx->{saved_searches} = ($self->staff_load_searches)[1];
+ }
+ $self->timelog("past staff saved searches");
+
+# TODO
+# $self->fetch_related_search_info($rec_id) unless $kwargs{no_search};
+# $self->timelog("past related search info");
+
+ # Check for user and load lists and prefs
+ if ($self->ctx->{user}) {
+ $self->_load_lists_and_settings;
+ $self->timelog("load user lists and settings");
+ }
+
+ # run copy retrieval in parallel to bib retrieval
+ my $cstore = OpenSRF::AppSession->create('open-ils.cstore');
+ my $copy_rec = $cstore->request(
+ 'open-ils.cstore.json_query.atomic',
+ $self->mk_copy_query(undef, $mmr_id, $org,
+ $copy_depth, $copy_limit, $copy_offset, $pref_ou)
+ );
+
+# TODO
+ # find foreign copy data
+# my $peer_rec = $U->simplereq(
+# 'open-ils.search',
+# 'open-ils.search.peer_bibs', $rec_id );
+#
+# $ctx->{foreign_copies} = $peer_rec;
+
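+ # metarecord => 1 tells get_records_and_facets() to fetch the record
+ # via unapi.mmr rather than unapi.bre (see the get_records_and_facets()
+ # changes further down in this patch)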
+ my (undef, @rec_data) = $self->get_records_and_facets([$mmr_id], undef, {
+ flesh => '{holdings_xml,bmp,mra,acp,acnp,acns}',
+ metarecord => 1,
+ site => $org_name,
+ depth => $depth,
+ pref_lib => $pref_ou
+ });
+
+ $self->timelog("past get_records_and_facets()");
+ $ctx->{bre_id} = $mmr_id;
+ $ctx->{marc_xml} = $rec_data[0]->{marc_xml};
+ $ctx->{copies} = $copy_rec->gather(1);
+
+ # TODO move me to a standalone function
+
+ # Add public copy notes to each copy - and while we're in there, grab peer bib records
+# my %cached_bibs = ();
+# foreach my $copy (@{$ctx->{copies}}) {
+# $copy->{notes} = $U->simplereq(
+# 'open-ils.circ',
+# 'open-ils.circ.copy_note.retrieve.all',
+# {itemid => $copy->{id}, pub => 1 }
+# );
+# $self->timelog("past copy note retrieval call");
+# $copy->{peer_bibs} = $U->simplereq(
+# 'open-ils.search',
+# 'open-ils.search.multi_home.bib_ids.by_barcode',
+# $copy->{barcode}
+# );
+# $self->timelog("past peer bib id retrieval");
+# my @peer_marc;
+# foreach my $bib (@{$copy->{peer_bibs}}) {
+# next if $bib eq $ctx->{bre_id};
+# next if $cached_bibs{$bib};
+# my (undef, @peer_data) = $self->get_records_and_facets(
+# [$bib], undef, {
+# flesh => '{}',
+# site => $org_name,
+# depth => $depth,
+# pref_lib => $pref_ou
+# });
+# $cached_bibs{$bib} = 1;
+# #$copy->{peer_bib_marc} = $peer_data[0]->{marc_xml};
+# push @peer_marc, $peer_data[0]->{marc_xml};
+# $self->timelog("fetched peer bib record $bib");
+# }
+# $copy->{peer_bib_marc} = \@peer_marc;
+# }
+
+ $self->timelog("past store copy retrieval call");
+ $ctx->{copy_limit} = $copy_limit;
+ $ctx->{copy_offset} = $copy_offset;
+
+ $ctx->{have_holdings_to_show} = 0;
+ $ctx->{have_mfhd_to_show} = 0;
+
+# TODO
+ $self->get_hold_copy_summary(undef, $mmr_id, $org);
+
+ $self->timelog("past get_hold_copy_summary()");
+ $self->ctx->{bib_is_dead} = OpenILS::Application::AppUtils->is_true(
+ OpenILS::Utils::CStoreEditor->new->json_query({
+ select => { bre => [ 'deleted' ] },
+ from => 'bre',
+ where => { 'id' => $rec_id }
+ })->[0]->{deleted}
+ );
+
+ $cstore->kill_me;
+
+# if (
+# $ctx->{get_org_setting}->
+# ($org, "opac.fully_compressed_serial_holdings")
+# ) {
+# # We're loading this data here? Are we therefore assuming that we
+# # *are* going to display something in the "issues" expandy?
+# $self->load_serial_holding_summaries($rec_id, $org, $copy_depth);
+# } else {
+# $ctx->{mfhd_summaries} =
+# $self->get_mfhd_summaries($rec_id, $org, $copy_depth);
+#
+# if ($ctx->{mfhd_summaries} && scalar(@{$ctx->{mfhd_summaries}})
+# ) {
+# $ctx->{have_mfhd_to_show} = 1;
+# };
+# }
+
+ $self->timelog("past serials holding stuff");
+
+ my %expandies = (
+ marchtml => sub {
+ $ctx->{marchtml} = $self->mk_marc_html($rec_id);
+ },
+ issues => sub {
+ return;
+ # XXX this needed?
+ },
+ cnbrowse => sub {
+ $self->prepare_browse_call_numbers();
+ }
+ );
+
+ my @expand = $self->cgi->param('expand');
+ if (grep {$_ eq 'all'} @expand) {
+ $ctx->{expand_all} = 1;
+ $expandies{$_}->() for keys %expandies;
+
+ } else {
+ for my $exp (@expand) {
+ $ctx->{"expand_$exp"} = 1;
+ $expandies{$exp}->() if exists $expandies{$exp};
+ }
+ }
+
+ $self->timelog("past expandies");
+
+ $self->added_content_stage2($rec_id);
+
+ $self->timelog("past added content stage 2");
+
+ return Apache2::Const::OK;
+}
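+
+ # NOTE: if it isn't handled elsewhere in this branch, EGCatLoader::load()
+ # will still need a dispatch entry for this handler, e.g. (hypothetical,
+ # following the existing load_record pattern):
+ #   return $self->load_metarecord if $path =~ m|opac/metarecord|;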
+
+
+
1;
$unapi_args->{depth} ||= $self->ctx->{aou_tree}->()->ou_type->depth;
$unapi_args->{flesh_depth} ||= 5;
+ my $is_meta = delete $unapi_args->{metarecord};
+ my $unapi_type = $is_meta ? 'unapi.mmr' : 'unapi.bre';
+
$unapi_cache ||= OpenSRF::Utils::Cache->new('global');
my $unapi_cache_key_suffix = join(
'_',
+ $is_meta || 0,
$unapi_args->{site},
$unapi_args->{depth},
$unapi_args->{flesh_depth},
$outer_self->timelog("get_records_and_facets(): got response content");
# Protect against requests for non-existent records
- return unless $data->{'unapi.bre'};
- my $xml = XML::LibXML->new->parse_string($data->{'unapi.bre'})->documentElement;
+ return unless $data->{$unapi_type};
+
+ my $xml = XML::LibXML->new->parse_string($data->{$unapi_type})->documentElement;
+ # NOTE: in metarecord mode, the bre_id will be that of the master
+ # record, in which case the bre_id acts as a secondary identifier
+ # for the metarecord. Caching via $bre_id (w/ is_meta key_suffix
+ # adjustments from above) should still produce unique cache
+ # values for each MR. IOW, there's no particular need to extract
+ # the MR id.
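+ # e.g. for master record 42 in MR mode, the key comes out something like
+ # TPAC_unapi_cache_42_1_<site>_<depth>_<flesh_depth>..., vs. ..._42_0_...
+ # for the plain bib view of record 42, so the two never collide.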
$outer_self->timelog("get_records_and_facets(): parsed xml");
# Protect against legacy invalid MARCXML that might not have a 901c
my $key = 'TPAC_unapi_cache_'.$bre_id.'_'.$unapi_cache_key_suffix;
my $cache_data = $unapi_cache->get_cache($key);
if ($$cache_data{running}) {
- $unapi_cache->put_cache($key, { id => $bre_id, marc_xml => $data->{'unapi.bre'} }, 10);
+ $unapi_cache->put_cache($key, { id => $bre_id, marc_xml => $data->{$unapi_type} }, 10);
}
}
}
);
- $self->timelog("get_records_and_facets(): about to call unapi.bre via json_query (rec_ids has " . scalar(@$rec_ids));
+ $self->timelog("get_records_and_facets(): about to call ".
+ "$unapi_type via json_query (rec_ids has " . scalar(@$rec_ids));
my @loop_recs = @$rec_ids;
my %rec_timeout;
$tmp_data{$unapi_data->{id}} = $unapi_data;
} else { # we're the first or we timed out. success_handler will populate the real value
$unapi_cache->put_cache($unapi_cache_key, { running => $$ }, 10);
+
+ my $sdepth = $unapi_args->{flesh_depth};
+ my $slimit = "acn=>$sdepth,acp=>$sdepth";
+ $slimit .= ",bre=>$sdepth" if $is_meta;
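+ # (assumption: in MR mode unapi.mmr also fleshes the constituent bre
+ # records, so they need a slimit entry of their own)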
+
$ses->request(
'open-ils.cstore.json_query',
{from => [
- 'unapi.bre', $bid, 'marcxml','record',
+ $unapi_type, $bid, 'marcxml','record',
$unapi_args->{flesh},
$unapi_args->{site},
$unapi_args->{depth},
- 'acn=>' . $unapi_args->{flesh_depth} . ',acp=>' . $unapi_args->{flesh_depth},
+ $slimit,
undef, undef, $unapi_args->{pref_lib}
]}
);
}
-
}