From: Bill Erickson Date: Mon, 9 Feb 2015 19:55:17 +0000 (-0500) Subject: JBAS-237 squashed authority cross-port X-Git-Url: https://old-git.evergreen-ils.org/?a=commitdiff_plain;h=5c940346f70ceb92b685a39b9bb48df43a40c040;p=working%2FEvergreen.git JBAS-237 squashed authority cross-port Commits ae4d0c7~1..7a98ebd Signed-off-by: Bill Erickson Conflicts: Open-ILS/examples/fm_IDL.xml Open-ILS/src/perlmods/lib/OpenILS/Application/AppUtils.pm Open-ILS/src/perlmods/lib/OpenILS/Application/Cat/Authority.pm Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Driver/Pg/QueryParser.pm Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Publisher/authority.pm Open-ILS/src/perlmods/lib/OpenILS/Application/Vandelay.pm Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader.pm Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Browse.pm Open-ILS/src/templates/opac/browse.tt2 Open-ILS/src/templates/opac/css/style.css.tt2 Open-ILS/xul/staff_client/server/cat/marcedit.xul Conflicts: Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Search.pm Conflicts: Open-ILS/web/js/ui/default/vandelay/vandelay.js Open-ILS/xul/staff_client/chrome/content/cat/opac.js --- diff --git a/KCLS/bs_files/BSLWexport.pl b/KCLS/bs_files/BSLWexport.pl index a9fd0e9c62..601df6ffce 100755 --- a/KCLS/bs_files/BSLWexport.pl +++ b/KCLS/bs_files/BSLWexport.pl @@ -17,7 +17,7 @@ use strict; use warnings; -use JSONPrefs; +use Backstage::JSONPrefs; use Backstage::Export; use Backstage::Email; use Backstage::FTP; @@ -37,7 +37,7 @@ while (@ARGV) { $prefs_file ||= $ENV{'HOME'} . "/myprefs.d/bslw.json"; -my $prefs = JSONPrefs->load($prefs_file); +my $prefs = Backstage::JSONPrefs->load($prefs_file); unless ($upload_file) { my $exporter = Backstage::Export->new($prefs); diff --git a/KCLS/bs_files/BSLWimport.pl b/KCLS/bs_files/BSLWimport.pl index 97ddb2b649..0e12d13595 100755 --- a/KCLS/bs_files/BSLWimport.pl +++ b/KCLS/bs_files/BSLWimport.pl @@ -16,13 +16,23 @@ use strict; use warnings; +use utf8; +use v5.8; -use JSONPrefs; +use Backstage::JSONPrefs; use Backstage::FTP; use Backstage::Import; use Archive::Zip qw( :ERROR_CODES :CONSTANTS ); use File::Basename; use Carp; +use DBI; +use DBD::Pg; +use DateTime; +use Date::Parse; +use Data::Dumper; +use Try::Tiny; + +use OpenSRF::System; # pass input file names on the command line as arguments my @input_files = (); @@ -35,6 +45,12 @@ my $rerun = 0; # Only download the files this time. We'll process them later. my $download = 0; +# Date of the export; optional parameter. +my $exportDate; + +my $hostName; +my $thePort; + # Loop through the command line arguments: while (my $arg = shift @ARGV) { if ($arg =~ /\.json$/) { @@ -43,12 +59,21 @@ while (my $arg = shift @ARGV) { $download = 1; } elsif ($arg =~ /^-{1,2}r(?:erun)?$/) { $rerun = 1; + } elsif ($arg =~ /^-{1,2}e(?:xport)?$/) { + $exportDate = shift @ARGV; + } elsif ($arg =~ /^-{1,2}h(?:ostname)?$/) { + $hostName = shift @ARGV; + } elsif ($arg =~ /^-{1,2}p(?:ort)?$/) { + $thePort = shift @ARGV; } else { push(@input_files, $arg); } } -my $prefs = JSONPrefs->load($prefs_file); +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); + +my $prefs = Backstage::JSONPrefs->load($prefs_file); # Download files from the ftp server if we have no input files as # arguments.
@@ -62,8 +87,16 @@ unless (scalar @input_files) { my $cwd = $prefs->get('import')->working_dir; $cwd .= "/" unless ($cwd =~ /\/$/); +# Get record entries that have been updated after the export date +# And save to a file +my $updatedBibs = retrieve_updated_bibs($exportDate, $hostName, $thePort); + # Create an import object. my $import = Backstage::Import->new($prefs, $rerun); +my $now_time = localtime; + +my $time = time(); +print ("Start BSLWimport.pl " . $now_time . "\n"); foreach my $file (@input_files) { # Skip the reports archive. Maybe someone will want them emailed @@ -80,7 +113,9 @@ foreach my $file (@input_files) { foreach $member ($zip->membersMatching('BIB')) { my $bibfile = $cwd . basename($member->fileName()); if ($member->extractToFileNamed($bibfile) == AZ_OK) { - $import->doFile($bibfile); + $now_time = localtime; + print ("BIB file started at " . $now_time . "\n"); + $import->doFile($bibfile, $updatedBibs); cleanup($bibfile) if ($prefs->get('import')->cleanup); } else { carp "Failed to extract " . $member->fileName() . " to $bibfile"; @@ -92,6 +127,21 @@ foreach my $file (@input_files) { foreach $member ($zip->membersMatching('DEL')) { $authfile = $cwd . basename($member->fileName()); if ($member->extractToFileNamed($authfile) == AZ_OK) { + $now_time = localtime; + print ("DEL file started at " . $now_time . "\n"); + $import->doFile($authfile); + cleanup($authfile) if ($prefs->get('import')->cleanup); + } else { + carp "Failed to extract " . $member->fileName() . " to $authfile"; + } + } + + # Handle updated authorities + foreach $member ($zip->membersMatching('CHG')) { + $authfile = $cwd . basename($member->fileName()); + if ($member->extractToFileNamed($authfile) == AZ_OK) { + $now_time = localtime; + print ("CHG file started at " . $now_time . "\n"); $import->doFile($authfile); cleanup($authfile) if ($prefs->get('import')->cleanup); } else { @@ -99,10 +149,12 @@ foreach my $file (@input_files) { } } - # Handle other authorities - foreach $member ($zip->membersMatching('(?!(BIB|DEL))')) { + # Handle new authorities + foreach $member ($zip->membersMatching('NEW')) { $authfile = $cwd . basename($member->fileName()); if ($member->extractToFileNamed($authfile) == AZ_OK) { + $now_time = localtime; + print ("NEW file started at " . $now_time . "\n"); $import->doFile($authfile); cleanup($authfile) if ($prefs->get('import')->cleanup); } else { @@ -111,6 +163,9 @@ foreach my $file (@input_files) { } } +$now_time = localtime; +print ("End BSLWimport.pl " . $now_time . "\n"); + # Check for authority control options and run authority_control_fields.pl # if it is properly configured. if ($prefs->get('import')->auth_control) { @@ -128,10 +183,115 @@ if ($prefs->get('import')->auth_control) { } } } +print_time( $time ); + +sub print_time { + use integer; + + my $start = shift || 0; + my $elapsed = time() - $start; + my $hours = $elapsed / (60 * 60); + my $seconds = $elapsed % 60; + my $minutes = ($elapsed - $hours * 60 * 60) / 60; + + print "Time elapsed: "; + print "$hours hours, " if $hours; + print "$minutes minutes, " if $minutes; + print "$seconds seconds\n"; +} sub cleanup { # Made this a sub in case we ever want to do more than just unlink # the file. + $now_time = localtime; + print("cleanup started at " . $now_time . 
"\n"); my $file = shift; return unlink($file); } + +sub retrieve_updated_bibs{ + # Get record entries that have been updated after the export date + + my $exportDate = shift; + my $sc = OpenSRF::Utils::SettingsClient->new; + #my $dbh = get_db_handle(); + my $platform = $sc->config_value( reporter => setup => database => 'driver' ); + my $host = $sc->config_value( reporter => setup => database => 'host' ); + my $port = $sc->config_value( reporter => setup => database => 'port' ); + my $database = $sc->config_value( reporter => setup => database => 'db' ); + if (!$database) { + $database = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $database; + } + my $user = $sc->config_value( reporter => setup => database => 'user' ); + my $pw = $sc->config_value( reporter => setup => database => 'pw' ); + + my $dsn = "dbi:$platform:dbname = $database; host = $host; port = $port"; + + my $dbh = DBI->connect( $dsn, $user, $pw, { + 'PrintError' => 1, + 'RaiseError' => 1, + 'PrintWarn' => 1 + }) or die "Unable to connect: " . $DBI::errstr . "\n"; + + my $sql = "BEGIN; SET statement_timeout = 0; COMMIT; SELECT id FROM biblio.record_entry WHERE edit_date > '$exportDate';"; + print $sql . "\n"; + unless (utf8::is_utf8($sql)){ + utf8::encode($sql); + } + + my $updated_bibs = []; + + try { + + $updated_bibs = $dbh->selectall_arrayref($sql); + + } catch { + warn "Unable to query DB for modified record entries. Aborting script."; + $dbh->disconnect; + return 0; + }; + + $dbh->disconnect; + + # And save to a file + my $updatedFile = "/var/KCLS_AUTH/updated_bibs_" . DateTime->now(time_zone => "local")->ymd(''); + open(MYFILE, '>' , $updatedFile); + + foreach my $id (@$updated_bibs) { + print MYFILE "@$id[0]\n"; + } + close(MYFILE); + + return $updatedFile; +} + +sub parseDateSting { + my $dateString = shift; + + my $epoch = str2time($dateString); + my $date = DateTime->from_epoch(epoch => $epoch); + + return $date +} + +sub get_db_handle { + + my $platform = 'Pg'; + my $database = 'evergreen'; + my $host = 'evergreentest.catalystitservices.com'; + my $port = '5415'; + my $user = 'evergreen'; + my $pw = 'evergreen'; + + my $dsn = "dbi:$platform:dbname = $database; host = $host; port = $port"; + + my $dbh = DBI->connect( $dsn, $user, $pw, { + 'PrintError' => 1, + 'RaiseError' => 1, + 'PrintWarn' => 1, + 'AutoCommit' => 0 # Auto commit off so we can commit/rollback + }) or die "Unable to connect: " . $DBI::errstr . "\n"; + + return $dbh; +} diff --git a/KCLS/bs_files/Backstage/Import.pm b/KCLS/bs_files/Backstage/Import.pm index 18e48d3374..4054733fcb 100644 --- a/KCLS/bs_files/Backstage/Import.pm +++ b/KCLS/bs_files/Backstage/Import.pm @@ -1,5 +1,5 @@ # --------------------------------------------------------------- -# Copyright © 2012 Merrimack Valley Library Consortium +# Copyright © 2012 Merrimack Valley Library Consortium # Jason Stephenson # This program is free software; you can redistribute it and/or modify @@ -12,6 +12,12 @@ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # --------------------------------------------------------------- + +# Import module used to process marc files returned from Backstage. Takes +# files of either bib records or auth records and breaks them down into SQL +# INSERT or UPDATE statements. Writes out batch files with these statements +# that can be executed against the database. 
+ package Backstage::Import; use strict; @@ -19,13 +25,16 @@ use warnings; use Carp; use MARC::Record; -use MARC::File::XML; +use MARC::File::XML ( BinaryEncoding => 'UTF-8' ); use MARC::File::USMARC; use OpenILS::Utils::Cronscript; use OpenILS::Utils::Normalize qw(clean_marc); use DateTime; use DateTime::Format::ISO8601; use Encode; +use Data::Dumper; +use File::Basename; +use DateTime; my $U = 'OpenILS::Application::AppUtils'; @@ -47,9 +56,10 @@ sub new { } sub doFile { - print("Inside doFile\n"); + #print("Inside doFile\n"); my $self = shift; my $filename = shift; + my $updatedBibs = shift; my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); if ($file) { @@ -57,14 +67,22 @@ sub doFile { $self->{'auth'} = $self->{'scr'}->authenticate( $self->{'prefs'}->evergreen->authentication->TO_JSON ); - print("Filename: " . $filename . "\n"); if ($filename =~ /BIB/) { - $self->doBibs($file); + print("Filename contains \"BIB\"\n"); + $self->doBibs($file, $updatedBibs); } elsif ($filename =~ /DEL/) { - $self->doDeletes($file); - } elsif ($filename =~ /AUTH/) { - $self->doAuths($file); + print("Filename contains \"DEL\"\n"); + $self->doDeletes($file); + } elsif ($filename =~ /CHG/) { + print("Filename contains \"CHG\"\n"); + $self->doAuths($file); + } elsif ($filename =~ /NEW/) { + print("Filename contains \"NEW\"\n"); + $self->doNewAuths($file); + } else { + print("Filename does not contain \"BIB\", \"DEL\", \"CHG\", or \"NEW\". Sad Times.\n"); } + $file->close(); $self->{'scr'}->logout; } else { @@ -76,40 +94,72 @@ sub doBibs { print("Inside doBibs\n"); my $self = shift; my $file = shift; + my $updatedBibFile = shift; my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); - print("start while in doBibs\n"); + my $bib_count = 0; + my $baseFileName = basename($file->{filename}); + print $baseFileName . "\n"; + my $fileOut = ""; + my $file_number = 0; + + # list of updated record entries since export + my %updatedBibs = (); + + # for unupdated marc records + my $unupdated_base = "unupdated_". $baseFileName . "_" . DateTime->now(time_zone => "local")->ymd(''); + my $unupdated = ""; + my $unupdated_file_number = 0; + my $unupdated_count = 0; + + if ( open (FH, '<', $updatedBibFile) ) { + while (<FH>) { + chomp; + $updatedBibs{ $_ } = ''; + } + close(FH); + } + else { + print "Unable to open file containing updated record entry ids\n"; + print "continuing with the assumption no record entries have been updated\n"; + } + while (my $input = $file->next()) { + if($bib_count % 100 eq 0) { + $file_number++; + $fileOut = $baseFileName . "." . $file_number . ".sql";#=~ s/\.marc/\.sql/i; + print("----- Writing to File:$fileOut\n"); + } my $id = $input->subfield('901', 'c'); - print ("id = " . $id . "\n"); + #print ("\nid = " . $id . "\n"); if ($id) { - my $bre = $editor->retrieve_biblio_record_entry($id); - print("bre = " . $bre . "\n"); - next if (!$bre || $U->is_true($bre->deleted)); - my $record = MARC::Record->new_from_xml($bre->marc, 'UTF8'); - my $str = $bre->edit_date; - $str =~ s/\d\d$//; - my $edit_date = DateTime::Format::ISO8601->parse_datetime($str); - print("export_date = " . $self->{'export_date'} .
"\n"); - if (DateTime->compare($edit_date, $self->{'export_date'}) < 0) { - my $needImport = date_comp($input, $record); - if ($needImport > 0) { - print("Import $id\n") - if ($self->{'prefs'}->get('import')->print_import); - my $newMARC = $input->as_xml_record(); - $bre->marc(clean_marc($newMARC)); - $bre->edit_date('now()'); - $editor->xact_begin; - $editor->update_biblio_record_entry($bre); - $editor->commit; - push(@{$self->{'bibs'}}, $id); - } else { - print("Keep $id\n") - if ($self->{'prefs'}->get('import')->print_keep); + if(exists $updatedBibs{$id}) { + #if id in list of updated ids write to a seperate file + if($unupdated_count % 10 eq 0) { + $unupdated_file_number++; + $unupdated = $unupdated_base . "." . $unupdated_file_number . ".mrc";#=~ s/\.marc/\.sql/i; + print("----- Writing to File:$unupdated\n"); } - } + open(MYFILE,">>/var/KCLS_AUTH/unupdated/$unupdated"); + binmode(MYFILE, ":utf8"); + print MYFILE $input->as_usmarc() . "\n"; + $unupdated_count++; + } + else { + #else id not in list go ahead an add to the update script + + open(MYFILE,">>/var/KCLS_AUTH/bibs_to_do/$fileOut"); + + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "UPDATE biblio.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$, fingerprint = biblio.extract_fingerprint(\$marc\$ $marc \$marc\$), quality = biblio.extract_quality(\$marc\$ $marc \$marc\$,'eng','BKS') WHERE id = $id;\n"; + close(MYFILE); + } } else { carp "No 901\$c in input record $id"; } + $bib_count++; } $editor->finish; } @@ -118,14 +168,26 @@ sub doDeletes { my $self = shift; my $file = shift; my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + my $fileOut = ""; + my $baseFileName = basename($file->{filename}); + my $id; + $fileOut = $baseFileName .".sql";#=~ s/\.marc/\.sql/i; + print("Writing to File:$fileOut\n"); while (my $input = $file->next()) { my @ares = find_matching_ares($editor, $input); if (scalar @ares) { $editor->xact_begin; foreach my $are (@ares) { - print("Deleting auth " . $are->id . "\n") - if ($self->{'prefs'}->get('import')->print_delete); - $editor->delete_authority_record_entry($are); + #print("Deleting auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_delete); + + $id = $are->id; + open(MYFILE,">>/var/KCLS_AUTH/auths_to_do/001.$fileOut.DELETE.sql"); + print MYFILE "DELETE FROM authority.record_entry WHERE id = $id;\n"; + print MYFILE "UPDATE authority.record_entry SET edit_date = NOW() WHERE id = $id;\n"; + close(MYFILE); + # replacing delete with an sql command to delete later + # $editor->delete_authority_record_entry($are); } $editor->commit; } @@ -134,43 +196,78 @@ sub doDeletes { } sub doAuths { - print("Inside doAuths\n"); + #print("Inside doAuths\n"); my $self = shift; my $file = shift; my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + my $baseFileName = basename($file->{filename}); + my $file_number = 0; + my $auth_count = 0; + my $fileOut = ""; while (my $input = $file->next()) { + if($auth_count % 500 eq 0) { + $file_number++; + $fileOut = $baseFileName . "." . $file_number . 
".sql";#=~ s/\.marc/\.sql/i; + print("Writing to File:$fileOut\n"); + } my @ares = find_matching_ares($editor, $input); if (scalar(@ares)) { foreach my $are (@ares) { my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); if (!$self->{'rerun'} || - ($self->{'rerun'} && date_comp($input, $record))) { - $editor->xact_begin; - print("Updating auth: " . $are->id . "\n") - if ($self->{'prefs'}->get('import')->print_import); - my $newMARC = $input->as_xml_record(); - $are->marc(clean_marc($newMARC)); - $are->edit_date('now()'); - $editor->update_authority_record_entry($are); - $editor->commit; + ($self->{'rerun'} && date_comp($input, $record))) { + open(MYFILE,">>/var/KCLS_AUTH/auths_to_do/001.$fileOut.UPDATE.sql"); + my $id = $are->id; + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "UPDATE authority.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$ WHERE id = $id;\n"; + close(MYFILE); } } } else { - $editor->xact_begin; - my $are = Fieldmapper::authority::record_entry->new(); - my $marc = $input->as_xml_record(); - $are->marc(clean_marc($marc)); - $are->last_xact_id("IMPORT-" . time); - $are->source(2); - if ($are = $editor->create_authority_record_entry($are)) { - print("Created new auth " . $are->id . "\n") - if ($self->{'prefs'}->get('import')->print_import); - $self->{'new_auths'}++; - } else { - carp("Failed to create new auth\n"); - } - $editor->commit; + # my $are = Fieldmapper::authority::record_entry->new(); + my $newMarc = $input->as_xml_record(); + my $marc = clean_marc($newMarc); + my $last_xact_id = "IMPORT-" . time; + my $source = 2; + open(MYFILE,">>/var/KCLS_AUTH/auths_to_do/001.$fileOut.INSERT.sql"); + + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "INSERT INTO authority.record_entry (marc,last_xact_id,source) Values( \$marc\$ $marc \$marc\$, '$last_xact_id', $source);\n"; + close(MYFILE); } + $auth_count++; + } + $editor->finish; +} + +sub doNewAuths { + #print("Inside doNewAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + my $baseFileName = basename($file->{filename}); + my $file_number = 0; + my $auth_count = 0; + my $fileOut = ""; + while (my $input = $file->next()) { + if($auth_count % 500 eq 0) { + $file_number++; + $fileOut = $baseFileName . "." . $file_number . ".NEW.INSERT.sql";#=~ s/\.marc/\.sql/i; + print("Writing to File:$fileOut\n"); + } + my $newMarc = $input->as_xml_record(); + my $marc = clean_marc($newMarc); + my $last_xact_id = "IMPORT-" . time; + my $source = 2; + open(MYFILE,">>/var/KCLS_AUTH/auths_to_do/001.$fileOut"); + + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 
'Joey Bada$$' + print MYFILE "INSERT INTO authority.record_entry (marc,last_xact_id,source) Values( \$marc\$ $marc \$marc\$, '$last_xact_id', $source);\n"; + close(MYFILE); + $auth_count++; } $editor->finish; } @@ -242,6 +339,7 @@ sub fix005 { } sub date_comp { + #print ("in date_comp\n"); my ($bslw, $own) = @_; my $bslw_date = undef; my $rec_date = undef; @@ -251,9 +349,21 @@ sub date_comp { fix005($bslw->field('005')->data()) ) if (defined($bslw->field('005'))); - $rec_date = DateTime::Format::ISO8601->parse_datetime( - fix005($own->field('005')->data()) - ) if (defined($own->field('005'))); + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. + } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } if (defined($bslw_date) && defined($rec_date)) { $need_import = DateTime->compare($bslw_date, $rec_date); @@ -261,6 +371,8 @@ sub date_comp { $need_import = 0; } + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . "\n"); + return $need_import; } diff --git a/KCLS/bs_files/Backstage/Import.pm.old-2014.01.16 b/KCLS/bs_files/Backstage/Import.pm.old-2014.01.16 new file mode 100644 index 0000000000..18e48d3374 --- /dev/null +++ b/KCLS/bs_files/Backstage/Import.pm.old-2014.01.16 @@ -0,0 +1,267 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML; +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + print("Filename: " . $filename . 
"\n"); + if ($filename =~ /BIB/) { + $self->doBibs($file); + } elsif ($filename =~ /DEL/) { + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + $self->doAuths($file); + } + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + print("start while in doBibs\n"); + while (my $input = $file->next()) { + my $id = $input->subfield('901', 'c'); + print ("id = " . $id . "\n"); + if ($id) { + my $bre = $editor->retrieve_biblio_record_entry($id); + print("bre = " . $bre . "\n"); + next if (!$bre || $U->is_true($bre->deleted)); + my $record = MARC::Record->new_from_xml($bre->marc, 'UTF8'); + my $str = $bre->edit_date; + $str =~ s/\d\d$//; + my $edit_date = DateTime::Format::ISO8601->parse_datetime($str); + print("export_date = " . $self->{'export_date'} . "\n"); + if (DateTime->compare($edit_date, $self->{'export_date'}) < 0) { + my $needImport = date_comp($input, $record); + if ($needImport > 0) { + print("Import $id\n") + if ($self->{'prefs'}->get('import')->print_import); + my $newMARC = $input->as_xml_record(); + $bre->marc(clean_marc($newMARC)); + $bre->edit_date('now()'); + $editor->xact_begin; + $editor->update_biblio_record_entry($bre); + $editor->commit; + push(@{$self->{'bibs'}}, $id); + } else { + print("Keep $id\n") + if ($self->{'prefs'}->get('import')->print_keep); + } + } + } else { + carp "No 901\$c in input record $id"; + } + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + print("Deleting auth " . $are->id . "\n") + if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + $editor->xact_begin; + print("Updating auth: " . $are->id . "\n") + if ($self->{'prefs'}->get('import')->print_import); + my $newMARC = $input->as_xml_record(); + $are->marc(clean_marc($newMARC)); + $are->edit_date('now()'); + $editor->update_authority_record_entry($are); + $editor->commit; + } + } + } else { + $editor->xact_begin; + my $are = Fieldmapper::authority::record_entry->new(); + my $marc = $input->as_xml_record(); + $are->marc(clean_marc($marc)); + $are->last_xact_id("IMPORT-" . time); + $are->source(2); + if ($are = $editor->create_authority_record_entry($are)) { + print("Created new auth " . $are->id . 
"\n") + if ($self->{'prefs'}->get('import')->print_import); + $self->{'new_auths'}++; + } else { + carp("Failed to create new auth\n"); + } + $editor->commit; + } + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. +sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/Import.pm.old-2014.01.21 b/KCLS/bs_files/Backstage/Import.pm.old-2014.01.21 new file mode 100644 index 0000000000..42fac0979a --- /dev/null +++ b/KCLS/bs_files/Backstage/Import.pm.old-2014.01.21 @@ -0,0 +1,283 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML; +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + #print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + print("Filename: " . $filename . "\n"); + if ($filename =~ /BIB/) { + $self->doBibs($file); + } elsif ($filename =~ /DEL/) { + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + $self->doAuths($file); + } + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + #print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + #print("start while in doBibs\n"); + while (my $input = $file->next()) { + my $id = $input->subfield('901', 'c'); + #print ("\nid = " . $id . "\n"); + if ($id) { + my $bre = $editor->retrieve_biblio_record_entry($id); + #print("bre = " . $bre . "\n"); + next if (!$bre || $U->is_true($bre->deleted)); + my $record = MARC::Record->new_from_xml($bre->marc, 'UTF8'); + my $str = $bre->edit_date; + $str =~ s/\d\d$//; + my $edit_date = DateTime::Format::ISO8601->parse_datetime($str); + #print("edit_date = " . $edit_date . " export_date = " . $self->{'export_date'} . "\n"); + if (DateTime->compare($edit_date, $self->{'export_date'}) < 0) { + my $needImport = date_comp($input, $record); + #print ("input = " . $input . " record = " . $record . " needImport = " . $needImport . " \n"); + if ($needImport > 0) { + #print("Import Bib $id\n") + # if ($self->{'prefs'}->get('import')->print_import); + my $newMARC = $input->as_xml_record(); + $bre->marc(clean_marc($newMARC)); + $bre->edit_date('now()'); + $editor->xact_begin; + $editor->update_biblio_record_entry($bre); + $editor->commit; + push(@{$self->{'bibs'}}, $id); + } else { + #print("Keep Bib $id\n") + # if ($self->{'prefs'}->get('import')->print_keep); + } + } + } else { + carp "No 901\$c in input record $id"; + } + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + #print("Deleting auth " . $are->id . 
"\n") + # if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + #print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + $editor->xact_begin; + #print("Updating auth: " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_import); + my $newMARC = $input->as_xml_record(); + $are->marc(clean_marc($newMARC)); + $are->edit_date('now()'); + $editor->update_authority_record_entry($are); + $editor->commit; + } + } + } else { + $editor->xact_begin; + my $are = Fieldmapper::authority::record_entry->new(); + my $marc = $input->as_xml_record(); + $are->marc(clean_marc($marc)); + $are->last_xact_id("IMPORT-" . time); + $are->source(2); + if ($are = $editor->create_authority_record_entry($are)) { + #print("Created new auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_import); + $self->{'new_auths'}++; + } else { + carp("Failed to create new auth\n"); + } + $editor->commit; + } + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. +sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + #print ("in date_comp\n"); + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. 
+ } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . "\n"); + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/Import2.pm b/KCLS/bs_files/Backstage/Import2.pm new file mode 100644 index 0000000000..e3fd162181 --- /dev/null +++ b/KCLS/bs_files/Backstage/Import2.pm @@ -0,0 +1,291 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML; +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + #print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + print("Filename: " . $filename . "\n"); + if ($filename =~ /BIB/) { + $self->doBibs($file); + } elsif ($filename =~ /DEL/) { + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + $self->doAuths($file); + } + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + #print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + #print("start while in doBibs\n"); + while (my $input = $file->next()) { + my $id = $input->subfield('901', 'c'); + #print ("\nid = " . $id . 
"\n"); + if ($id) { + + open(MYFILE,'>>\var\KCLS_AUTH\data.sql'); + + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + print MYFILE "UPDATE biblio.record_entry SET edit_date = NOW(), marc = \$\$ $marc \$\$ WHERE id = $id"; + close(MYFILE); + # my $bre = $editor->retrieve_biblio_record_entry($id); + #print("bre = " . $bre . "\n"); + # next if (!$bre || $U->is_true($bre->deleted)); + # my $record = MARC::Record->new_from_xml($bre->marc, 'UTF8'); + # my $str = $bre->edit_date; + # $str =~ s/\d\d$//; + # my $edit_date = DateTime::Format::ISO8601->parse_datetime($str); + #print("edit_date = " . $edit_date . " export_date = " . $self->{'export_date'} . "\n"); + # if (DateTime->compare($edit_date, $self->{'export_date'}) < 0) { + # my $needImport = date_comp($input, $record); + #print ("input = " . $input . " record = " . $record . " needImport = " . $needImport . " \n"); + # if ($needImport > 0) { + #print("Import Bib $id\n") + # if ($self->{'prefs'}->get('import')->print_import); + # my $newMARC = $input->as_xml_record(); + # $bre->marc(clean_marc($newMARC)); + # $bre->edit_date('now()'); + # $editor->xact_begin; + # $editor->update_biblio_record_entry($bre); + # $editor->commit; + # push(@{$self->{'bibs'}}, $id); + # } else { + #print("Keep Bib $id\n") + # if ($self->{'prefs'}->get('import')->print_keep); + # } + # } + } else { + carp "No 901\$c in input record $id"; + } + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + #print("Deleting auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + #print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + $editor->xact_begin; + #print("Updating auth: " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_import); + my $newMARC = $input->as_xml_record(); + $are->marc(clean_marc($newMARC)); + $are->edit_date('now()'); + $editor->update_authority_record_entry($are); + $editor->commit; + } + } + } else { + $editor->xact_begin; + my $are = Fieldmapper::authority::record_entry->new(); + my $marc = $input->as_xml_record(); + $are->marc(clean_marc($marc)); + $are->last_xact_id("IMPORT-" . time); + $are->source(2); + if ($are = $editor->create_authority_record_entry($are)) { + #print("Created new auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_import); + $self->{'new_auths'}++; + } else { + carp("Failed to create new auth\n"); + } + $editor->commit; + } + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. 
+sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + #print ("in date_comp\n"); + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. + } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . "\n"); + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/Import3.pm b/KCLS/bs_files/Backstage/Import3.pm new file mode 100644 index 0000000000..96afd596ba --- /dev/null +++ b/KCLS/bs_files/Backstage/Import3.pm @@ -0,0 +1,291 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML; +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + #print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + print("Filename: " . $filename . "\n"); + if ($filename =~ /BIB/) { + $self->doBibs($file); + } elsif ($filename =~ /DEL/) { + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + $self->doAuths($file); + } + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + #print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + #print("start while in doBibs\n"); + while (my $input = $file->next()) { + my $id = $input->subfield('901', 'c'); + #print ("\nid = " . $id . "\n"); + if ($id) { + + open(MYFILE,'>>/var/KCLS_AUTH/data3.sql'); + + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + print MYFILE "UPDATE biblio.record_entry SET edit_date = NOW(), marc = \$\$ $marc \$\$, fingerprint = biblio.extract_fingerprint(\$\$ $marc \$\$), quality = biblio.extract_quality(\$\$ $marc \$\$,'eng','BKS') WHERE id = $id;"; + close(MYFILE); + # my $bre = $editor->retrieve_biblio_record_entry($id); + #print("bre = " . $bre . "\n"); + # next if (!$bre || $U->is_true($bre->deleted)); + # my $record = MARC::Record->new_from_xml($bre->marc, 'UTF8'); + # my $str = $bre->edit_date; + # $str =~ s/\d\d$//; + # my $edit_date = DateTime::Format::ISO8601->parse_datetime($str); + #print("edit_date = " . $edit_date . " export_date = " . $self->{'export_date'} . "\n"); + # if (DateTime->compare($edit_date, $self->{'export_date'}) < 0) { + # my $needImport = date_comp($input, $record); + #print ("input = " . $input . " record = " . $record . " needImport = " . $needImport . 
" \n"); + # if ($needImport > 0) { + #print("Import Bib $id\n") + # if ($self->{'prefs'}->get('import')->print_import); + # my $newMARC = $input->as_xml_record(); + # $bre->marc(clean_marc($newMARC)); + # $bre->edit_date('now()'); + # $editor->xact_begin; + # $editor->update_biblio_record_entry($bre); + # $editor->commit; + # push(@{$self->{'bibs'}}, $id); + # } else { + #print("Keep Bib $id\n") + # if ($self->{'prefs'}->get('import')->print_keep); + # } + # } + } else { + carp "No 901\$c in input record $id"; + } + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + #print("Deleting auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + #print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + $editor->xact_begin; + #print("Updating auth: " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_import); + my $newMARC = $input->as_xml_record(); + $are->marc(clean_marc($newMARC)); + $are->edit_date('now()'); + $editor->update_authority_record_entry($are); + $editor->commit; + } + } + } else { + $editor->xact_begin; + my $are = Fieldmapper::authority::record_entry->new(); + my $marc = $input->as_xml_record(); + $are->marc(clean_marc($marc)); + $are->last_xact_id("IMPORT-" . time); + $are->source(2); + if ($are = $editor->create_authority_record_entry($are)) { + #print("Created new auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_import); + $self->{'new_auths'}++; + } else { + carp("Failed to create new auth\n"); + } + $editor->commit; + } + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. 
+sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + #print ("in date_comp\n"); + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. + } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . "\n"); + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/Import4.pm b/KCLS/bs_files/Backstage/Import4.pm new file mode 100644 index 0000000000..d57a31a2cb --- /dev/null +++ b/KCLS/bs_files/Backstage/Import4.pm @@ -0,0 +1,267 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML; +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + #print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + print("Filename: " . $filename . "\n"); + if ($filename =~ /BIB/) { + $self->doBibs($file); + } elsif ($filename =~ /DEL/) { + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + $self->doAuths($file); + } + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + #print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + #print("start while in doBibs\n"); + while (my $input = $file->next()) { + my $id = $input->subfield('901', 'c'); + #print ("\nid = " . $id . "\n"); + if ($id) { + + open(MYFILE,'>>/var/KCLS_AUTH/data.sql'); + + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "UPDATE biblio.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$, fingerprint = biblio.extract_fingerprint(\$marc\$ $marc \$marc\$), quality = extract_quality(\$marc\$ $marc \$marc\$,'eng','BKS') WHERE id = $id;\n"; + close(MYFILE); + + } else { + carp "No 901\$c in input record $id"; + } + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + #print("Deleting auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + #print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + $editor->xact_begin; + #print("Updating auth: " . $are->id . 
"\n") + # if ($self->{'prefs'}->get('import')->print_import); + my $newMARC = $input->as_xml_record(); + $are->marc(clean_marc($newMARC)); + $are->edit_date('now()'); + $editor->update_authority_record_entry($are); + $editor->commit; + } + } + } else { + $editor->xact_begin; + my $are = Fieldmapper::authority::record_entry->new(); + my $marc = $input->as_xml_record(); + $are->marc(clean_marc($marc)); + $are->last_xact_id("IMPORT-" . time); + $are->source(2); + if ($are = $editor->create_authority_record_entry($are)) { + #print("Created new auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_import); + $self->{'new_auths'}++; + } else { + carp("Failed to create new auth\n"); + } + $editor->commit; + } + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. +sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + #print ("in date_comp\n"); + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. + } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . 
"\n"); + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/Import5.pm b/KCLS/bs_files/Backstage/Import5.pm new file mode 100644 index 0000000000..bcc791d01d --- /dev/null +++ b/KCLS/bs_files/Backstage/Import5.pm @@ -0,0 +1,263 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML; +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + #print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + print("Filename: " . $filename . "\n"); + if ($filename =~ /BIB/) { + $self->doBibs($file); + } elsif ($filename =~ /DEL/) { + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + $self->doAuths($file); + } + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + #print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + #print("start while in doBibs\n"); + while (my $input = $file->next()) { + my $id = $input->subfield('901', 'c'); + #print ("\nid = " . $id . "\n"); + if ($id) { + + open(MYFILE,'>>/var/KCLS_AUTH/bibdata.sql'); + + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 
'Joey Bada$$' + print MYFILE "UPDATE biblio.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$, fingerprint = biblio.extract_fingerprint(\$marc\$ $marc \$marc\$), quality = biblio.extract_quality(\$marc\$ $marc \$marc\$,'eng','BKS') WHERE id = $id;\n"; + close(MYFILE); + + } else { + carp "No 901\$c in input record $id"; + } + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + #print("Deleting auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + #print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + open(MYFILE,'>>/var/KCLS_AUTH/authoritydata.sql'); + my $id = $are->id; + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "UPDATE authority.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$ WHERE id = $id;\n"; + close(MYFILE); + } + } + } else { + # my $are = Fieldmapper::authority::record_entry->new(); + my $newMarc = $input->as_xml_record(); + my $marc = clean_marc($newMarc); + my $last_xact_id = "IMPORT-" . time; + my $source = 2; + open(MYFILE,'>>/var/KCLS_AUTH/authoritydata.sql'); + + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "INSERT INTO authority.record_entry (marc,last_xact_id,source) Values( \$marc\$ $marc \$marc\$, '$last_xact_id', $source);\n"; + close(MYFILE); + } + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. 
+sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + #print ("in date_comp\n"); + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. + } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . "\n"); + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/Import6.pm b/KCLS/bs_files/Backstage/Import6.pm new file mode 100644 index 0000000000..08aefa7809 --- /dev/null +++ b/KCLS/bs_files/Backstage/Import6.pm @@ -0,0 +1,318 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML; +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; +use Data::Dumper; +use File::Basename; +use DateTime; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + #print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $updatedBibs = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + if ($filename =~ /BIB/) { + print("Filename contains \"BIB\"\n"); + $self->doBibs($file, $updatedBibs); + } elsif ($filename =~ /DEL/) { + print("Filename contains \"DEL\"\n"); + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + print("Filename contains \"AUTH\"\n"); + $self->doAuths($file); + } else { + print("Filename does not contain \"BIB\", \"DEL\", or \"AUTH\". Sad Times.\n"); + } + + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $updatedBibFile = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + #print("start while in doBibs\n"); + my $bib_count = 0; + my $baseFileName = basename($file->{filename}); + print $baseFileName . "\n"; + my $fileOut = ""; + my $file_number = 0; + + # open list of updated record entries + my %updatedBibs = {}; + my $unupdated = "unupdated_record_entries_" . DateTime->now(time_zone => "local")->ymd(''); + if ( open (FH, '<', $updatedBibFile) ) { + while () { + chomp; + $updatedBibs{ $_ } = ''; + } + close(FH); + } + else { + print "Unable to open file containing updated record entry id's\n"; + print "continuing with the asumption no record entries have been updated\n"; + } + + while (my $input = $file->next()) { + if($bib_count % 10000 eq 0) { + $file_number++; + $fileOut = $baseFileName . "." . $file_number . ".sql";#=~ s/\.marc/\.sql/i; + print("Writing to File:$fileOut\n"); + } + my $id = $input->subfield('901', 'c'); + #print ("\nid = " . $id . "\n"); + if ($id) { + if(exists $updatedBibs{$id}) { + #if id in list of updated ids write to a seperate file + open(MYFILE,">>/var/KCLS_AUTH/$unupdated"); + my $marc = clean_marc($input->as_xml_record()); + print MYFILE "$marc\n"; + } + else { + #else id not in list go ahead an add to the update script + + open(MYFILE,">>/var/KCLS_AUTH/$fileOut"); + + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 
'Joey Bada$$' + print MYFILE "UPDATE biblio.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$, fingerprint = biblio.extract_fingerprint(\$marc\$ $marc \$marc\$), quality = biblio.extract_quality(\$marc\$ $marc \$marc\$,'eng','BKS') WHERE id = $id;\n"; + close(MYFILE); + } + } else { + carp "No 901\$c in input record $id"; + } + $bib_count++; + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + #print("Deleting auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + #print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + my $baseFileName = basename($file->{filename}); + my $file_number = 0; + my $auth_count = 0; + my $fileOut = ""; + while (my $input = $file->next()) { + if($auth_count % 10000 eq 0) { + $file_number++; + $fileOut = $baseFileName . "." . $file_number . ".sql";#=~ s/\.marc/\.sql/i; + print("Writing to File:$fileOut\n"); + } + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + open(MYFILE,">>/var/KCLS_AUTH/$fileOut.UPDATE.sql"); + my $id = $are->id; + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "UPDATE authority.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$ WHERE id = $id;\n"; + close(MYFILE); + } + } + } else { + # my $are = Fieldmapper::authority::record_entry->new(); + my $newMarc = $input->as_xml_record(); + my $marc = clean_marc($newMarc); + my $last_xact_id = "IMPORT-" . time; + my $source = 2; + open(MYFILE,">>/var/KCLS_AUTH/001.$fileOut.INSERT.sql"); + + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "INSERT INTO authority.record_entry (marc,last_xact_id,source) Values( \$marc\$ $marc \$marc\$, '$last_xact_id', $source);\n"; + close(MYFILE); + } + $bib_count++; + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. 
+sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + #print ("in date_comp\n"); + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. + } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . "\n"); + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/Import7.pm b/KCLS/bs_files/Backstage/Import7.pm new file mode 100644 index 0000000000..e7f92381e0 --- /dev/null +++ b/KCLS/bs_files/Backstage/Import7.pm @@ -0,0 +1,329 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# --------------------------------------------------------------- +package Backstage::Import; + +use strict; +use warnings; + +use Carp; +use MARC::Record; +use MARC::File::XML ( BinaryEncoding => 'UTF-8' ); +use MARC::File::USMARC; +use OpenILS::Utils::Cronscript; +use OpenILS::Utils::Normalize qw(clean_marc); +use DateTime; +use DateTime::Format::ISO8601; +use Encode; +use Data::Dumper; +use File::Basename; +use DateTime; + +my $U = 'OpenILS::Application::AppUtils'; + +sub new { + my $class = shift; + my $self = {}; + $self->{'prefs'} = shift; + $self->{'rerun'} = shift; + $self->{'utf8'} = 0; + my $dstr = $self->{'prefs'}->export->last_run_date; + $dstr =~ s/ /T/; + $dstr =~ s/\.\d+//; + $dstr =~ s/([-+]\d\d)\d\d$/$1/; + $self->{'export_date'} = DateTime::Format::ISO8601->parse_datetime($dstr); + $self->{'bibs'} = []; + $self->{'new_auths'} = 0; + bless($self, $class); + return $self; +} + +sub doFile { + #print("Inside doFile\n"); + my $self = shift; + my $filename = shift; + my $updatedBibs = shift; + my $isUTF8 = (($filename =~ /\.UTF8$/) || $self->{'utf8'}); + my $file = MARC::File::USMARC->in($filename, ($isUTF8) ? 'UTF8' : undef); + if ($file) { + $self->{'scr'} = OpenILS::Utils::Cronscript->new({nolockfile=>1}); + $self->{'auth'} = $self->{'scr'}->authenticate( + $self->{'prefs'}->evergreen->authentication->TO_JSON + ); + if ($filename =~ /BIB/) { + print("Filename contains \"BIB\"\n"); + $self->doBibs($file, $updatedBibs); + } elsif ($filename =~ /DEL/) { + print("Filename contains \"DEL\"\n"); + $self->doDeletes($file); + } elsif ($filename =~ /AUTH/) { + print("Filename contains \"AUTH\"\n"); + $self->doAuths($file); + } else { + print("Filename does not contain \"BIB\", \"DEL\", or \"AUTH\". Sad Times.\n"); + } + + $file->close(); + $self->{'scr'}->logout; + } else { + carp "Failed to read MARC from $filename."; + } +} + +sub doBibs { + print("Inside doBibs\n"); + my $self = shift; + my $file = shift; + my $updatedBibFile = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + #print("start while in doBibs\n"); + my $bib_count = 0; + my $baseFileName = basename($file->{filename}); + print $baseFileName . "\n"; + my $fileOut = ""; + my $file_number = 0; + + # list of updated record entries since export + my %updatedBibs = {}; + + # for unupdated marc records + my $unupdated_base = "unupdated_record_entries_" . DateTime->now(time_zone => "local")->ymd(''); + my $unupdated = ""; + my $unupdated_file_number = 0; + my $unupdated_count = 0; + + if ( open (FH, '<', $updatedBibFile) ) { + while () { + chomp; + $updatedBibs{ $_ } = ''; + } + close(FH); + } + else { + print "Unable to open file containing updated record entry id's\n"; + print "continuing with the asumption no record entries have been updated\n"; + } + + while (my $input = $file->next()) { + if($bib_count % 10000 eq 0) { + $file_number++; + $fileOut = $baseFileName . "." . $file_number . ".sql";#=~ s/\.marc/\.sql/i; + print("----- Writing to File:$fileOut\n"); + } + my $id = $input->subfield('901', 'c'); + #print ("\nid = " . $id . "\n"); + if ($id) { + if(exists $updatedBibs{$id}) { + #if id in list of updated ids write to a seperate file + if($unupdated_count % 200 eq 0) { + $unupdated_file_number++; + $unupdated = $unupdated_base . "." . $unupdated_file_number . ".mrc";#=~ s/\.marc/\.sql/i; + print("----- Writing to File:$unupdated\n"); + } + open(MYFILE,">>/var/KCLS_AUTH/unupdated/$unupdated"); + print MYFILE $input->as_usmarc() . 
"\n"; + $unupdated_count++; + } + else { + #else id not in list go ahead an add to the update script + + open(MYFILE,">>/var/KCLS_AUTH/$fileOut"); + + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "UPDATE biblio.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$, fingerprint = biblio.extract_fingerprint(\$marc\$ $marc \$marc\$), quality = biblio.extract_quality(\$marc\$ $marc \$marc\$,'eng','BKS') WHERE id = $id;\n"; + close(MYFILE); + } + } else { + carp "No 901\$c in input record $id"; + } + $bib_count++; + } + $editor->finish; +} + +sub doDeletes { + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + while (my $input = $file->next()) { + my @ares = find_matching_ares($editor, $input); + if (scalar @ares) { + $editor->xact_begin; + foreach my $are (@ares) { + #print("Deleting auth " . $are->id . "\n") + # if ($self->{'prefs'}->get('import')->print_delete); + $editor->delete_authority_record_entry($are); + } + $editor->commit; + } + } + $editor->finish; +} + +sub doAuths { + #print("Inside doAuths\n"); + my $self = shift; + my $file = shift; + my $editor = $self->{'scr'}->editor(authtoken=>$self->{'auth'}); + my $baseFileName = basename($file->{filename}); + my $file_number = 0; + my $auth_count = 0; + my $fileOut = ""; + while (my $input = $file->next()) { + if($auth_count % 10000 eq 0) { + $file_number++; + $fileOut = $baseFileName . "." . $file_number . ".sql";#=~ s/\.marc/\.sql/i; + print("Writing to File:$fileOut\n"); + } + my @ares = find_matching_ares($editor, $input); + if (scalar(@ares)) { + foreach my $are (@ares) { + my $record = MARC::Record->new_from_xml($are->marc, 'UTF8'); + if (!$self->{'rerun'} || + ($self->{'rerun'} && date_comp($input, $record))) { + open(MYFILE,">>/var/KCLS_AUTH/$fileOut.UPDATE.sql"); + my $id = $are->id; + #Update using 'marc(clean_marc($newMARC)); + my $newMARC = $input->as_xml_record(); + my $marc = clean_marc($newMARC); + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "UPDATE authority.record_entry SET edit_date = NOW(), marc = \$marc\$ $marc \$marc\$ WHERE id = $id;\n"; + close(MYFILE); + } + } + } else { + # my $are = Fieldmapper::authority::record_entry->new(); + my $newMarc = $input->as_xml_record(); + my $marc = clean_marc($newMarc); + my $last_xact_id = "IMPORT-" . time; + my $source = 2; + open(MYFILE,">>/var/KCLS_AUTH/001.$fileOut.INSERT.sql"); + + #Adding marc between $$ quotes to try to avoid instances where the marc record contains $$ i.e. 'Joey Bada$$' + print MYFILE "INSERT INTO authority.record_entry (marc,last_xact_id,source) Values( \$marc\$ $marc \$marc\$, '$last_xact_id', $source);\n"; + close(MYFILE); + } + $auth_count++; + } + $editor->finish; +} + +sub utf8 { + my $self = shift; + if (@_) { + $self->{'utf8'} = shift; + } + return $self->{'utf8'}; +} + +sub rerun { + my $self = shift; + if (@_) { + $self->{'rerun'} = shift; + } + return $self->{'rerun'}; +} + +# read only property. 
+sub bibs { + my $self = shift; + return $self->{'bibs'}; +} + +sub have_new_auths { + my $self = shift; + return $self->{'new_auths'}; +} + +sub find_matching_ares { + my $e = shift; + my $rec = shift; + my @results = (); + my $afrs = []; + my $subfield = $rec->subfield('010', 'a'); + if ($subfield) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '010', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } elsif ($subfield = $rec->subfield('035', 'a')) { + $afrs = $e->search_authority_full_rec( + { + 'tag' => '035', + 'subfield' => 'a', + 'value' => { "=" => ['naco_normalize', $subfield, 'a'] } + } + ); + foreach my $afr (@$afrs) { + push(@results, $e->retrieve_authority_record_entry($afr->record)); + } + } + return @results; +} + +sub fix005 { + my $in = shift; + substr($in,8,0) = 'T'; + $in =~ s/\.0$//; + return $in; +} + +sub date_comp { + #print ("in date_comp\n"); + my ($bslw, $own) = @_; + my $bslw_date = undef; + my $rec_date = undef; + my $need_import = 1; + + $bslw_date = DateTime::Format::ISO8601->parse_datetime( + fix005($bslw->field('005')->data()) + ) if (defined($bslw->field('005'))); + + eval { DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data())) }; + if ( $@ ) { + if (defined($own->field('005'))) { + print "ERROR: Invalid date " . $own->field('005')->data() . " \n" + } else { + # print "ERROR: 005 field not defined\n"; + # Since there is no 005 field it needs an import. + } + } + else { + $rec_date = DateTime::Format::ISO8601->parse_datetime( + fix005($own->field('005')->data()) + ) if (defined($own->field('005'))); + } + + if (defined($bslw_date) && defined($rec_date)) { + $need_import = DateTime->compare($bslw_date, $rec_date); + } elsif (defined($rec_date) && !defined($bslw_date)) { + $need_import = 0; + } + + #print ("bslw_date = " . $bslw_date . "; rec_date = " . $rec_date . "; need_import = " . $need_import . "\n"); + + return $need_import; +} + +1; diff --git a/KCLS/bs_files/Backstage/JSONPrefs.pm b/KCLS/bs_files/Backstage/JSONPrefs.pm new file mode 100644 index 0000000000..202d719bcf --- /dev/null +++ b/KCLS/bs_files/Backstage/JSONPrefs.pm @@ -0,0 +1,158 @@ +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the Lesser GNU General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Lesser General Public License for more details. +# --------------------------------------------------------------- +package Backstage::JSONPrefs; + +use strict; +use warnings; + +# A simple perl module to load and save preferences as JSON objects. +BEGIN { + use Carp; + use Encode; + use Exporter; + use JSON::XS; + use Scalar::Util qw/blessed/; + use vars qw/$AUTOLOAD/; + our ($VERSION); + $VERSION = '1.1'; +} + +# Create a new, empty prefs object. 
+sub new { + my $class = shift; + my $self = {}; + $self->{':pretty:'} = shift; + $self->{':file:'} = undef; + $self->{':prefs:'} = {}; + bless $self, $class; + return $self; +} + +# Load preferences from a JSON file. Since we are a blessed hashref, +# we expect the JSON file to contain a JSON object. If called as a +# class method, creates a new prefs object. If called as an instance +# method, replaces $self with the loaded data. +sub load { + my $class_or_self = shift; + my $file = shift; + croak("No filename supplied") unless (defined($file)); + + my $self = undef; + + my $content; + if (open(FILE, "<:utf8", "$file")) { + while (my $line = ) { + $content .= $line; + } + close(FILE); + } + + if ($content) { + if (blessed($class_or_self)) { + $self = $class_or_self; + } else { + $self = $class_or_self->new(); + } + $self->{':file:'} = $file; + $self->{':prefs:'} = decode_json($content); + } + + return $self; +} + +# Get or set whether or not we pretty print when saving. +sub pretty { + my $self = shift; + if (@_) { + $self->{':pretty:'} = shift; + } + return $self->{':pretty:'}; +} + +# Write the preference data to a named file. +sub save { + my $self = shift; + my $file = shift || $self->{':file:'}; + if ($file && open(FILE, ">:utf8", "$file")) { + my $pretty = 0; + # Check if $self->{':pretty:'} is defined && true selon perl. + $pretty = 1 if (defined($self->{':pretty:'}) && $self->{':pretty:'}); + my $content = JSON::XS->new()->allow_blessed(1)->convert_blessed(1) + ->pretty($pretty)->encode($self->{':prefs:'}); + print(FILE "$content\n"); + close(FILE); + $self->{':file:'} = $file; + return 1; + } else { + carp("No file to save to!"); + } + return 0; +} + +# Return an array of the fields in the preferences object. This does +# not iterate through subobjects at the moment, it only does the first +# level of fields. +sub fields { + my $self = shift; + my @fields = (); + foreach my $key (keys %{$self->{':prefs:'}}) { + push(@fields, $key); + } + return @fields; +} + +# Get the value of a field. You can use this method to get the value +# of a preference field whose name matches another JSONPrefs method. +sub get { + my $self = shift; + my $field = shift; + if (ref($self->{':prefs:'}->{$field}) eq 'HASH' + && !blessed($self->{':prefs:'}->{$field})) { + my $temp->{':prefs:'} = $self->{':prefs:'}->{$field}; + bless($temp, blessed($self)); + $self->{':prefs:'}->{$field} = $temp; + } + return $self->{':prefs:'}->{$field}; +} + +# Set the value of a field. You can use this method to set the value +# of a preference field whose name matches another JSONPrefs method. +sub set { + my $self = shift; + my $field = shift; + return $self->{':prefs:'}->{$field} = shift; +} + +# Use field names like methods. Also blesses any hashref members to +# return them as JSONPrefs objects. +sub AUTOLOAD { + my $self = shift; + my $type = ref ($self) || croak "$self is not an object"; + my $field = $AUTOLOAD; + $field =~ s/.*://; + if (@_) { + return $self->set($field, @_); + } else { + return $self->get($field); + } +} + +# Used by JSON::XS to convert blessed JSONPrefs back into hashrefs. 
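Since Backstage::JSONPrefs is what BSLWimport.pl and BSLWexport.pl load their configuration through, a brief usage sketch may help. The file name and keys below are hypothetical, but the methods (load, get, set, pretty, save, and the AUTOLOAD field accessors) are the ones defined above:

    use strict;
    use warnings;
    use Backstage::JSONPrefs;

    my $prefs = Backstage::JSONPrefs->load('/tmp/bslw-example.json')
        or die "Could not load preferences\n";

    # Nested JSON objects come back blessed, so accessors chain via AUTOLOAD,
    # e.g. for {"import": {"working_dir": "/var/KCLS_AUTH", "cleanup": 1}}:
    my $workdir = $prefs->get('import')->working_dir;
    print "working_dir = $workdir\n";

    # set()/pretty()/save() round-trip the structure back to disk.
    $prefs->set('last_checked', time());
    $prefs->pretty(1);
    $prefs->save();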
+sub TO_JSON { + my $self = shift; + return {%{$self->{':prefs:'}}}; +} + +1; diff --git a/KCLS/bs_files/bs_bib_load.sql b/KCLS/bs_files/bs_bib_load.sql new file mode 100644 index 0000000000..45850addb5 --- /dev/null +++ b/KCLS/bs_files/bs_bib_load.sql @@ -0,0 +1,206 @@ +SET CLIENT_ENCODING TO 'UNICODE'; + +BEGIN; + +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01381cam a2200349Ia 4500ocn798409621OCoLC20121228212212.0120628s2012 nju e 001 0 eng d 2012947904016117955Uk9781118408292 (pbk.)1118408292 (pbk.)(OCoLC)798409621UKMGBengUKMGBOCLCOSINLBILCYDXCPNTGUtOrBLWNTGA005.446523005.4465 RIZRizzo, John.OS X Mountain Lion server for dummies /by John Rizzo.Hoboken, N.J. :Wiley,c2012.xx, 380 p. ;24 cm.Includes index.OS X Mountain Lion server is the easy way to tame your network. This book shows you how to get it up and running, and how to set up all the features to harness its power for your home or office.Introduction --Getting Mountain Lion server up and running --Creating and maintaining user accounts and directories --Serving up files and printers --Facilitating user collaboration --Managing clients --The part of tens.Mac OS.Operating systems (Computers)Macintosh (Computer)Programming.lmc2012-12-21aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02200cam a2200397Ma 4500ocn794816001OCoLC20130614123744.0120522s2012 cc a e 001 0 eng d016095133Uk7928801789781449316136 (pbk.)1449316131 (pbk.)(OCoLC)794816001(OCoLC)792880178UKMGBengUKMGBBTCTABDXYDXCPOCLCOJSTEINCPCOOILCUtOrBLWNTGATR267.5.A33B788 2012006.68623006.686 BRUBrundage, Barbara(Barbara K.)Photoshop elements 11 :the missing manual /Barbara Brundage.Photoshop elements eleven.1st ed.Beijing ;Sebastopol, CA :O'Reilly,2012.xxv, 630 p. :chiefly col. ill. ;24 cm.The missing manualInclude index.Whether you're a photographer, scrapbooker, or aspiring graphic artist, Photoshop Elements is an ideal image-editing tool-- once you know your way around. This guide removes the guesswork, and provides jargon-free advice and step-by-step guidance.Introduction to Elements. Finding your way around Elements ;Importing, managing, and saving photos ;Rotating and resizing photos --Elemental elements. The quick fix ;Making selections ;Layers: the heart of elements --Retouching. Basic image retouching ;Elements for digital photographers ;Retouching: fine-tuning images ;Removing and adding color ;Photomerge: creating panoramas, group shots, and more --Artistic elements. Drawing with brushes, shapes, and other tools ;Filters, actions, layer styles, and gradients ;Text in elements --Sharing images. Creating projects ;Printing photos ;Email and the Web ;Online albums and slideshows --Additional elements. Beyond the basics --Appendices. Installation and troubleshooting.Adobe Photoshop elements.PhotographyDigital techniques.Missing manual.lmc2012-12-21aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01786cam a2200397Ia 4500ocn800644386OCoLC20121228211350.0120702s2012 njua e 001 0 eng d 2012948656016122474Uk9781118374887 (pbk.)1118374886 (pbk.)(OCoLC)800644386UKMGBengUKMGBEINCPYDXCPVP@ILCBWXJO3UtOrBLWNTGA006.75423006.754 HARHarvell, Ben,1981-Facebook /by Ben Harvell.Hoboken, N.J. :John Wiley & Sons Inc.,c2012.305 p. :col. ill. 
;24 cm.Teach yourself visuallyVisual read less-learn more"The fast and easy way to learn" --Cover.Includes index.Provides clear, step-by-step instructions for tackling more than 185 Facebook tasks. Each task-based spread covers a single technique, sure to help you get up and running on Facebook in no time.Setting up an account --Setting security --Setting privacy --Finding and organizing friends --Setting your status --Communicating with friends --Using timeline and news feed --Using groups and events --Sharing photos, video, and music --Using apps --Working with notes --Using search and notifications --Accessing Facebook mobile --Using location services on Facebook --Understanding Facebook ads and pages.Facebook (Electronic resource)Online social networks.Teach yourself visually.Visual read less, learn more.lmc2012-12-21aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01007cam a22003618a 4500ocn775418885OCoLC20121228220517.0120802s2012 nyu e 001 0 eng 201202924497803995376910399537694(OCoLC)775418885DLCengDLCBTCTABDXOCLCOOCOIK2YDXCPNTGUtOrBLWpccNTGABF698.35.I59D46 2012155.2/3223155.232 DEMDembling, Sophia.The introvert's way :living a quiet life in a noisy world /Sophia Dembling.1st ed.New York :Penguin Group,2012.ix, 198 p. ;19 cm."A Perigee book."Includes index.Introversion.Introverts.Interpersonal relations.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01813cam a2200397 a 4500ocn774496036OCoLC20121228203031.0120531s2012 cau e b 000 0 eng 20120219439781608823482 (pbk. : alk. paper)1608823482 (pbk. : alk. paper)(OCoLC)7744960361348902QBIDLCengDLCYDXBTCTABDXOCLCOYDXCPBWXKAAQBXNTGUtOrBLWpccNTGABF637.C74L38 2012158.223158.2 LAVLavender, Neil J.Impossible to please :how to deal with perfectionist coworkers, controlling spouses, and other incredibly critical people /Neil J. Lavender, Alan Cavaiola.Subtitle on the cover:Make no mistakeOakland, CA :New Harbinger Publications,c2012.iv, 182 p. ;23 cm.Includes bibliographical references (p. 181-182).Controlling perfectionism explained --The controlling perfectionist as romantic partner, as parent, and in the workplace --How the controlling perfectionist has affected you --Recognizing what you can and can't do --Setting limits and boundaries --Establishing better communication --Handling controlling perfectionists in romantic relationships, family life, and friendships --Handling controlling perfectionists in the workplace --Seeking qualified professional help.Criticism, Personal.Perfectionism (Personality trait)Interpersonal conflict.Interpersonal relations.Cavaiola, Alan A.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01514cam a2200397 a 4500ocn764313828OCoLC20121228091953.0120409s2012 flu e b 000 0deng 20120117069781616386122 (trade paper)1616386126 (trade paper)1616387204 (e-book)6387203 (e-book)(OCoLC)764313828DLCengDLCBTCTANTGUtOrBLWpccNTGABV5091.V6G99 2012236/.123236.1 GYWGwyn, Liz.Amazing stories of life after death /Liz Gwyn.1st ed.Lake Mary, Fla. :Charisma House,c2012.xxxi, 189 p. ;22 cm.Includes bibliographical references.Mr. 
Smith --Comatose --Moses --Doug's revenge --Huge reptile --Highway 21 --Aneurysm --The bear --Yes, Lord --Z-28 --A little boy, a grandpa, and Jesus --Four-year-old and an angel --RSVP --Angels in the room --Jesus in the ambulance --Cole and Jesus --Twisted car --Fiery pit --AT-6 war plane --Tunnel --Field of flowers --The cage --Warning --Leroy --His splendor.Visions.Supernatural.Miracles.Near-death experiencesReligious aspectsChristianity.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01706cam a2200361Ia 4500ocn801965682OCoLC20121228081648.0120727s2012 scu e b 000 0 eng d9781470158484 (pbk.)1470158485 (pbk.)(OCoLC)801965682JBLJBLJBLNTGUtOrBLWn-us---NTGA323.32942097323323.32942 ROBRobin, Daniel K.Libertarian war on poverty :repairing the ladder of upward mobility /Daniel K. Robin.[Charleston, S.C. :CreateSpace],2012.xviii, 201 p. ;23 cm.Includes bibliographical references.The author presents his opinions and perspectives on how laws have impeded those trying to climb out of poverty and how the concepts of liberty and freedom must be the foundation for any viable solution to poverty.Why help the poor? --Champions of the poor --Making it hard to work --Migration policy and poverty --Choosing education to end poverty --The war on drugs meets the war on poverty --Occuaptional licensure and excessive regulation --Health care --Where have all the savings gone? --Housing --Economic growth : the mother of all anti-poverty programs --Freedom is the champion of the little guy.PovertyUnited States.PoorUnited States.LibertarianismUnited States.United StatesSocial policy.United StatesPolitics and government2009-bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01214cam a22003614a 4500ocn815456189OCoLC20130614123744.0121030s2013 njua e 001 0 eng d 201294892111183564039781118356401(OCoLC)815456189UOKUOKILCBKLSRCUtOrBLWNTGAHG179.N42693 2013332.0240028553623332.024002 NELNelson, Stephen L.,1959-Quicken 2013 for dummies /by Stephen L. Nelson.Hoboken, N.J. :Wiley,c2013.xvi, 368 p. :ill. ;24 cm.--For dummiesIncludes index.Introduction --Zen, Quicken, and the big picture --The absolute basics --Home finances --Very serious business --The part of tens.Learn to keep your finances in order the easy way, with Quicken 2013-- and this handy guide!Quicken (Computer file)Finance, PersonalComputer programs.--For dummies.lmc2012-12-21aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01373cam a2200349Ia 4500ocn820738104OCoLC20121228210827.0121210s2012 cauab e 000 0 eng d73041440097819363656231936365626(OCoLC)820738104(OCoLC)730414400NYPICNYPICIAMCXPOCLCOBTCTABDXYDXCPNTGUtOrBLWn-us---NTGA362.870973 STAStabile, Gabriele.Refugee hotel /photographs by Gabriele Stabile ; text by Juliet Linderman.San Francisco :McSweeneys Books,c2012.319 p. :chiefly ill. (some col.), maps ;14 x 21 cm.Voice of witnessRefugee Hotel is a collection of photography and interviews that documents the arrival of refugees in the United States. 
Images are coupled with moving testimonies from people describing their first days in the U.S., the lives they've left behind, and the new communities they've since created.RefugeesUnited StatesPortraits.RefugeesUnited StatesInterviews.Documentary photography.Linderman, Juliet.Voice of witness.csr2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00879nam a2200325Ia 4500ocn811770216OCoLC20121228204922.0121003s2011 caua e 000 0aeng d97806154821250615482120(OCoLC)811770216NTGTEFUtOrBLWn-us-caNTGA363.209223363.2092 SUMSumner, Ted.Deep cover cop /Ted Sumner & Mills Crenshaw.Limited 1st ed.[California? :s.n.],c2011.vi, 401 p. :ill. (some col.) ;24 cm.Sumner, Ted.PoliceCaliforniaBiography.Undercover operationsCalifornia.Crenshaw, Mills.lmc2012-12-21aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01282cam a2200373 a 4500ocn793221822OCoLC20130614123744.0120518s2012 mnua e 000 0 eng 20120186379780873518703 (cloth : alk. paper)0873518705 (cloth : alk. paper)9780873518710 (e-book)0873518713 (e-book)(OCoLC)793221822DLCengDLCYDXBTCTABDXYDXCPOCLCOBWXGPIVP@NTGUtOrBLWpccn-us-mnNTGAHV6533.M6S93 2012364.152/309223364.152309 SWASwanson, William,1945-Black, white, blue :the assassination of patrolman Sackett /William Swanson.St. Paul, MN :Borealis Books,c2012.251 p. :ill. ;24 cm.Young men and murder --A very cold case --The burden of proof --Afterword --Acknowledgments.Sackett, James,1942-1970.Murder victimsMinnesotaSaint PaulCase studies.MurderMinnesotaSaint PaulCase studies.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02358cam a2200397 i 4500ocn776523510OCoLC20121228224241.0120706s2012 nyuab e 001 0 eng 20120222349780316208192 (hardback)0316208191 (hardback)(OCoLC)7765235101348774QBIDLCrdaengDLCYDXBTCTABDXOCLCOYDXCPIEBIK2CDXQBXNTGUtOrBLWpccNTGAQ173.M18 201250023500 MADMad science :Einstein's fridge, Dewar's flask, Mach's speed, and 362 other inventions and discoveries that made our world /edited by Randy Alfred ; from the WIRED blog, This Day in Tech, founded by Tony Long.First edition.New York :Little, Brown and Company,2012.390 pages :illustrations, map ;22 cmtextrdacontentunmediatedrdamediavolumerdacarrierIncludes index."365 days of inventions, discoveries, science, and technology, from the editors of Wired Magazine. On January 30, Rubik applied for a patent on his cube (1975). On the next day, 17 years earlier, the first U.S. Satellite passed through the Van Allen radiation belt. On March 17, the airplane "black box" made its maiden voyage (1953). And what about today? Every day of the year has a rich scientific and technological heritage just waiting to be uncovered, and Wired's top-flight science-trivia book MAD SCIENCE collects them chronologically, from New Year's Day to year's end, showing just how entertaining, wonderful, bizarre, and relevant science can be. In 2010, Wired's popular "This Day in Tech" blog peaked with more than 700,000 page views each month, and one story in 2008 drew more than a million unique viewers. This book will collect the most intriguing anecdotes from the blog's run-one for each day of the year-and publish them in a package that will instantly appeal to hardcore techies and curious laypeople alike. 
"--Provided by publisher.ScienceMiscellanea.ScienceBlogs.Alfred, Randy,editor.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01748cam a22004218i 4500ocn781432393OCoLC20121228125419.0120308s2013 ilua e b 001 0 eng c 20120102139780226682952 (hardcover : alkaline paper)0226682951 (hardcover : alkaline paper)9780226923789 (e-book)0226923789 (e-book)(OCoLC)781432393ICU/DLCrdaengCGUDLCBTCTABDXOCLCOVKCYDXCPLF3CDXABGVP@BWXORXNTGUtOrBLWpccNTGAQD13.P75 2013540.1/1223540.112 PRIPrincipe, Lawrence,author.The secrets of alchemy /Lawrence M. Principe.Chicago ;London :University of Chicago Press,2013.v, 281 pages, [8] pages of plates :illustrations (some color) ;24 cmtextrdacontentunmediatedrdamediavolumerdacarrierSynthesisIncludes bibliographical references (pages 213-271) and index.What is alchemy? --Origins : Graeco-Egyptian chemeia --Development : Arabic al-kīmiyā --Maturity : Medieval Latin alchemia --Redefinitions, revivals, and reinterpretations : alchemy from the eighteenth century to the present --The golden age : practicing chymistry in the early modern period --Unveiling the secrets --The wider world of chymistry.AlchemyHistory.Alchemists.Synthesis (University of Chicago. Press)JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02959cam a2200481Ia 4500ocn802556589OCoLC20121228235430.0120731s2012 enka e b 001 0 eng d016082830Uk78587004797818566974391856697436(OCoLC)802556589(OCoLC)785870047SISPLSISPLSISPLCDXBTCTABDXUKMGBYDXCPILOYNKSINLBNTGUtOrBLWn-us---NTGATS171.4.H46 2012604.223604.2 HENHenry, Kevin.Drawing for product designers /Kevin Henry.London :Laurence King,2012.208 p. :ill. ;29 cm.Portfolio skillsIncludes bibliographical references (p. 207) and index.Understanding sketching --The psychology of sketching --Defining sketching. Case study: HLB design diagrams --Tutorial: orthographic projection --Tutorial: orthographic sketching --Orientation. Case study: Gerrit Rietveld's red and blue chair --Tutorial: rotated plan method --Case study: method --Tutorial: (de)constructing the cube --Tutorial: unfolding geometry --Registration. Case study: Myto chair --Case study: Mission One Motorcycle --Tutorial: sketching a tape measure --Form. Case study: TCV display for Olivetti --Tutorial: sketching a contoured bar of soap --Tutorial: sketching the Pringle potato chip --Tutorial: sketching an athletics shoe --Line. Case study: DC25 vacuum cleaner --Case study: vessel ideation --Tutorial: putting line and orientation together --Exploring forms in space. Tutorial: Panton chair --Tutorial: Vållö watering can --Tutorial: sketching tools --Explaining forms in space. Tutorial: fundamentals of rendering --Tutorial: rendering simple forms --Tutorial: rendering complex forms --Case study: Fiskars garden barrow --Exploring forms in time. Case study: Dyson DC25 user's manual --Case study: Golden section information graphics --Tutorial: creating a storyboard --Tutorial: sketching a cellphone --Tutorial: sketching an exhibit --Putting it all together. 
Tutorial: creating a presentation.Mechanical drawingStudy and teaching (Higher)Mechanical drawingTechnique.Freehand technical sketchingComputer-aided design.DrawingPsychological aspects.DrawingPhilosophy.Rendering (Computer graphics)Visual texture recognition.Technical illustration.Industrial designUnited States21st century.Product designCase studies.Space and timeDesign.Portfolio skills.Product design.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02426cam a2200337Ia 4500ocn809041596OCoLC20130614123744.0120904s2012 cau e 000 0 eng 201101936297814019315201401931529(OCoLC)809041596APLAPLNTGUtOrBLWNTGA613.0424423 613.04244 KOF 2012Koff, Ashley.Mom energy :a simple plan to live fully charged ; from the experts who coach Hollywood's most celebrated moms /Ashley Koff, Kathy Kaehler.2nd ed.Carlsbad, Calif. :Hay House,2012.xviii, 251 p. ;23 cm."From celebrated dietitian Ashley Koff and fitness trainer to the stars Kathy Kaehler comes Mom Energy, an exciting new way for moms to tap into their own natural and renewable sources of energy to overcome fatigue and achieve their personal health goals. Koff and Kaehler have put together a safe, sensible, flexible, and, most importantly, effective program for moms of any age--whether their kids are in diapers or heading off to college.While being a mom is undeniably rewarding, it's also one of the most physically demanding and stressful activities in modern society. In fact, one of the most common complaints from mothers is that they simply don't have the energy to do everything they want, which means they end up sacrificing one thing (usually their own health) to accomplish another. But these sacrifices aren't necessary if you follow some simple guidelines and avoid secret energy saboteurs. With discussions that cover everything from nutrition to fitness to time management, Koff and Kaehler lay out a three-part program (reorganize, rehab, and recharge) that can be molded to any lifestyle to help moms naturally up their energy levels. And with this enhanced energy, they will get all sorts of other unexpected benefits, including a stronger immune system, easier weight loss, and even better relationships. Mom Energy will teach readers what will help and what will hurt in their quest for optimum energy. "--Provided by publisher.WomenHealth and hygiene.Physical fitness.Motherhood.Kaehler, Kathy.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01015cam a22003378a 4500ocn800026941OCoLC20130614123744.0120928s2013 cau e 000 0 eng 20120321089781401940478 (hardback)1401940471 (hardback)(OCoLC)800026941DLCengDLCBTCTAOCLCOBDXNTGUtOrBLWpccNTGARA776.95.N53 201361323613 NICNicolai, Jim,1968-Integrative wellness rules :a simple guide to healthy living /Jim Nicolai.Carlsbad, Calif. :Hay House,c2013.xvii, 247 p. ;24 cm.Shares insights and strategies to optimize health and create wellness.Self-care, Health.Integrative medicine.Health promotion.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01875nam a2200373Ia 4500ocn822993543OCoLC20130614123744.0121228s2013 dcu e f001 0 eng d(OCoLC)822993543WIMWIMNTGUtOrBLWn-us---NTGA629.25323629.222 FUE 2013Fuel economy guide :model year 2013 /U.S. Department of Energy, U.S. Environmental Protection Agency.Model year 2013 fuel economy guide[Washington, D.C.] :U.S. Dept. 
of Energy, Office of Energy Efficiency and Renewable Energy :U.S. Environmental Protection Agency,[2013]i, 41 p. ;28 cm.Cover title."DOE/EE-0778"--Cover."The U.S. Environmental Protection Agency (EPA) and U.S. Department of Energy (DOE) produce the Fuel Economy Guide to help buyers choose the most fuel-efficient vehicle that meets their needs. Most vehicles in this guide (other than plug-in hybrids) have three fuel economy estimates: a "city" estimate ... a "highway" estimate ... a "combined" estimate. Estimates for all vehicles are based on laboratory testing under standardized conditions to allow for fair comparisons"--P. i.Includes index.AutomobilesUnited StatesFuel consumption.Motor vehiclesUnited StatesFuel consumption.TrucksUnited StatesFuel consumption.United States.Department of Energy.Office of Energy Efficiency and Renewable Energy.United States.Environmental Protection Agency.http://www.fueleconomy.gov/feg/pdfs/guides/FEG2013.pdfhttp://www.fueleconomy.govlmc2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01266cam a2200373Ia 4500ocn778422547OCoLC20121228210313.0120223s2012 enka e b 001 0 eng d9781608199754 (US)1608199754 (US)9781408152645 (UK)1408152649 (UK)(OCoLC)778422547BTCTAengBTCTABDXNLEOCLCOCUVYDXCPBWXVP@NTGUtOrBLWe------n-us---NTGA636.5022223SF505.L44 2012636.50222 LEWLewis, Celia,1948-The illustrated guide to ducks and geese and other domestic fowl :how to choose them, how to keep them /[text and illustrations by Celia Lewis].Ducks and geese and other domestic fowlLondon ;New York :Bloomsbury Publishing,2012.160 p. :col. ill. ;26 cm.Includes bibliographical references and index.Poultry.Poultry breeds.PoultryEuropePictorial works.PoultryUnited StatesPictorial works.csr2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01828cam a2200349Ia 4500ocn793585022OCoLC20130614123744.0120514s2011 scu e b 000 e eng d9781468012507 (pbk.)1468012509 (pbk.)(OCoLC)793585022YDXCPYDXCPBDXCOOJAONTGUtOrBLWNTGAQL82.H25 2012639.922639.9 HANHance, Jeremy Leon.Life is good :conservation is an age of mass extinction /Jeremy Leon Hance.[Charleston, S.C.] :Createspace,c2011.x, 204 p. ;c 21 m.Includes bibliographical references (p. 196-201)Introduction : the life emergency --Meeting Tam in Borneo : our last chance to save the world's smallest rhino --Will jellyfish take over the world? --Why top predators matter --The camera trap revolution : how a simple device is shaping research and conservation worldwide --Nature's greatest spectacle faces extinction --The penguin problem, or stop eating our fish! --What if Noah had left behind the ugly ones? 
--Zoos : why a revolution is necessary to justify them --The end of the oceans : from bounty to empty --Language and conservation : why words matter --Saving the world's weirdest mammal --Shifting baselines : forgetting the lost --Gone : extinction over the past decade --The anaconda and the fer-de-lance : one day on Suriname's jungle coast.Wildlife conservation.Nature conservation.Endangered species.Extinction (Biology)NatureEffect of human beings on.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02003cam a2200361Ia 4500ocn794040293OCoLC20130614123744.0120519s2012 maua e 001 0 eng d016126998Uk97814405517961440551790(OCoLC)794040293BTCTAengBTCTABDXUKMGBCPLYDXCPUtOrBLWNTGATX147.D463 2012640 DENDenholtz, Charlotte.The modern-day pioneer :simple living in the 21st century /Charlotte Denholtz.Avon, MA :Adams Media,c2012.303 p. :ill. ;19 cm."Contains material adapted and abridged from: The everything guide to root cellaring, by Catherine Abbot ...; The everything small-space gardening book, by Catherine Abbot ...; The everything guide to living off the grid, by Terri Reid ...; The everything soapmaking book, 2nd ed., by Alicia Grosso; The everything candlemaking book, by M.J. Abadie ...; The everything vegetarian cookbook, by Jay Weinstein ...; The everything soup, stew, & chili cookbook, by Belinda Hulin ...; The everything cast-iron cookbook, by Cinnamon Cooper ...; The everything guide to food remedies, by Lorie Rice ...; The everything quilting book, by Sandra Detrixhe ...; The everything guide to herbal remedies, by Martha Schindler Connors with Larry Alshuler ...; The everything bread cookbook, by Leslie Bilderback ...; The everything homebrewing book, by Drew Beechum ...; The everything sewing book, by Sandra Detrixhe"--P. [304]."Sew, cook, quilt, farm, and craft your way to sustainable living!"--Cover.Includes index.Home economics.Home economics, Rural.Handicraft.Sustainable living.Frontier and pioneer lifeMiscellanea.lmc2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01470cam a2200373 a 4500ocn792889585OCoLC20121228210402.0120801s2012 maua e 001 0 eng 2012024855016136002Uk8050186098157404099781603427272 (pbk.)1603427279 (pbk.)(OCoLC)792889585(OCoLC)805018609(OCoLC)815740409DLCengDLCIG#BTCTAOCLCOWIQIK2VP@UKMGBYDXCPNTGUtOrBLWNTGATX601.W45 2012641.423641.4 WEIWeingarten, Matthew.Preserving wild foods :a modern forager's recipes for curing, cannning, smoking, and pickling /Matthew Weingarten and Raquel Pelzel ; photography by Stéphanie de Rougé.North Adams, MA :Storey Pub.,c2012.256 p. :col. ill. ;24 cm.Includes index.Coastline : gifts from the sea --Pastures & hedgeroes : grazing lands and natural borders --Gardens & fields : cultivated and harvested --Forest & woods : foraged, picked, and plucked --Banks & wetlands : freshwater depths and shores.Canning and preserving.FoodPreservation.Cooking (Natural foods)Pelzel, Raquel.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01432cam a2200361 a 4500ocn771652860OCoLC20121228205933.0120106s2012 nyua e 000 0 eng 201104925297815847996271584799625(OCoLC)771652860DLCengDLCYDXBTCTABDXOCLCOYDXCPOCLCQNTGUtOrBLWpccNTGATT387.P365 2012646.2/04423646.2044 FISFishbein, Dena.The painted home by Dena /photographs by John Ellis ; [editor, Wesley Royce].New York :Stewart, Tabori & Chang,c2012.200 p. :col. 
ill. ;26 cm.Introduction: surround yourself with the things you love --Welcome to Seven Oaks Ranch: entryway and family room --Everyday spaces: kitchen and sunroom --Places to gather: dining room, living room, and den --Cozy nooks: little bedroom and little sewing room --Private havens: master bedroom suite --A room of one's own: hallway and upstairs bedrooms --Inspirational spaces: studio and garden --Entertaining.House furnishings.Machine sewing.Ellis, John.Royce, Wesley.Dena Designs.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01563cam a2200385 a 4500ocn785873735OCoLC20121228165835.0120512s2012 cau e b s000 0 eng 2012010493016117483Uk8116476349780520270237 (cloth : acid-free paper)0520270231 (cloth : acid-free paper)(OCoLC)785873735(OCoLC)811647634DLCengDLCIG#BTCTABDXUKMGBOCLCOVP@YDXCPBWXZCUZAGPULAU@NTGUtOrBLWpccNTGARJ216.C652 2012649/.3323649.33 COBCobb-Barston, Suzanne Michaels,1978-Bottled up :how the way we feed babies has come to define motherhood, and why it shouldn't /Suzanne Barston.Berkeley :University of California Press,c2012.x, 211 p. ;22 cm.Includes bibliographical references (p. 185-211).Preconceived notions --Lactation failures --Of human bonding --The dairy queens --Damn lies and statistics --Soothing the savage breast.Discusses the issue of breast feeding and whether it is fair to judge parenting on breast vs. bottle as opposed to making the right choice for a family.Breastfeeding.BreastfeedingComplications.BreastfeedingSocial aspects.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01110cam a22003494a 4500ocn781679234OCoLC20121228095300.0120816s2012 caua e b 001 0 eng 20120328959781609945282 (pbk.)160994528X (pbk.)(OCoLC)781679234DLCengDLCBTCTABDXJAIYDXCPCDXIG#BWXVP@NTGUtOrBLWpccNTGAHD62.5.C6353 2012658.1/123658.11 COHCohan, Peter S.,1957-Hungry start-up strategy :creating new ventures with limited resources and unlimited vision /Peter S. Cohan.San Francisco :Berrett-Koehler Publishers,c2012.xiv, 244 p. :ill. ;24 cm.Includes bibliographical references and index.New business enterprises.Strategic planning.Venture capital.Entrepreneurship.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01296cam a2200349Ia 4500ocn778419462OCoLC20130614123745.0120223s2012 txua e b 001 0 eng d9781937856106 (pbk.)1937856100 (pbk.)(OCoLC)778419462BTCTAengBTCTABDXKNJYDXCPGK7OCLCOCXPIFJCDXNTGUtOrBLWNTGAHF5415.5.F59 2012658.81223658.812 FLYFlynn, Anthony,1983-Custom nation :why customization is the future of business and how to profit from it /Anthony Flynn, Emily Flynn Vencat ; with Dennis Flynn.Dallas Texas :BenBella Books, Inc.,c2012.xii, 240 p. :ill. ;21 cm.Includes bibliographical references (p. 
193-202) and index.Explains how customization can make any business stand apart and generate market share, increase profit margins, and develop customer loyalty.Market segmentation.Entrepreneurship.Success in business.Vencat, Emily Flynn.Flynn, Dennis C.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01408cam a2200373Ia 4500ocn781679065OCoLC20130614123745.0120321s2012 ii a e 000 0aeng d97893803401739380340176(OCoLC)781679065BTCTAengBTCTAYDXCPKNJNTGUtOrBLWengtama-ii---NTGA741.092 TEJTejubehan(Singer)Drawing from the city :based on the oral stories of /Tejubehan ; original Tamil text: Saalai Selvam ; English text: V. Geetha & Gita Wolf.Chennai, India :Tara Books,c2012.1 v. (unpaged) :ill. ;37 cm.Teju Behan is a singer and self taught urban folk artist from Ahmedabad in western India who describes her life of poverty until a job working as a singer with a fellow artist led her to discover her own artistic talent.Tejubehan(Singer)Folk artistsIndiaAhmadābādBiography.Women artistsIndiaAhmadābādBiography.SingersIndiaAhmadābādBiography.Folk artIndiaAhmadābād.Selvam, Saalai.Kītā, Va.Wolf, Gita.csr2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01158cam a2200325Ma 4500ocn809154144OCoLC20121228213702.0040813s2012 gaua e 6 000 1deng 9781603092654160309265X(OCoLC)809154144AU@engAU@OCLCOUtOrBLWn-us---NTGA741.597322741.56973 KOL V.4Kochalka, James.American elf :the collected sketchbook diaries of James Kochalka.Book four,January 1, 2008 to December 31, 2011.Marietta, Ga. :Top Shelf,c2012.1 v. (unpaged) :chiefly ill. ;22 cm.Collects five years of the semi-autobiographical online comic strip diary American Elf in which the author depicts himself as an elf.Kochalka, JamesDiariesComic books, strips, etc.CartoonistsUnited StatesDiariesComic books, strips, etc.Comic books, strips, etc.Graphic novels.lmc2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02374cam a2200445Ia 4500ocn795182099OCoLC20130111155049.0120530s2012 orua e 6 000 f eng d016099256Uk9781595829429 (pbk.)1595829423 (pbk.)(OCoLC)795182099UKMGBengUKMGBBDXOCLCQNTGUtOrBLWengjpnNTGA741.523741.5952 MIU V.36Miura, Kentarō.Beruseruku.EnglishBerserk.36 /by Kentaro Miura ; translation, Duane Johnson ; lettering and retouch, Replibooks.Milwaukie, Or. :Dark Horse Manga ;London :Diamond [distributor],2012.1 v. (unpaged) :chiefly ill. ;19 cm.Translated from the Japanese with katakana and English sound effects."First published in Japan in 2011 by Hakusensha, Inc., Tokyo"--T.p. verso."English-language translation © 2012 by Dark Horse Comics, Inc. and Digital Manga Inc."--T.p. verso."This collection is translated into English but oriented in right-to-left reading format, as originally published"--P. [4] of cover."Parental advisory: explicit content"--P. [1] of cover."For readers 18+. For mature readers"--P. [4] of cover."What Guts the Black Swordsman and his companions had seen as an island refuge from monstrous sea creatures and the ghost ship of the demonic Bonebeard is in fact the very lair of the malefic sea god that controls them. With no avenue of escape, Guts must once again don the Berserker armor to give his company any chance of survival. 
But without the protective influence of the sorceress Schierke, Guts risks being forever lost within the cursed armor and becoming an even greater threat to his comrades"--P. [4] of cover.SwordsmenComic books, strips, etc.Imaginary wars and battlesComic books, strips, etc.Fantasy comic books, strips, etc.Graphic novels, JapaneseTranslations into English.localComic books, strips, etc.Japan.Johnson, Duane,1976-trlslh2013-01-11aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01313cam a2200361 a 4500ocn775416420OCoLC20121228003207.0120416s2012 quca e 6 000 0 eng 201290242369781770460874177046087X(OCoLC)775416420NLCengNLCBTCTACDXVP@YDXCPOCOBDXBKXBWXZCULIVNTGUtOrBLWn-us-nyNTGAPN6727.T65N49 2012741.5/97323741.5973 TOMTomine, Adrian,1974-New York drawings :a decade of covers, comics, illustrations, and sketches from the pages of The New Yorker and beyond /Adrian Tomine.1st ed.Montreal :Drawn & Quarterly,2012.175 p. :chiefly col. ill. ;29 cm.Includes drawings previously published in The New Yorker.Collects the artist's comics, illustrations, and covers produced for The New Yorker magazine, as well as other uncollected works inspired by New York City.New York (N.Y.)Comic books, strips, etc.Graphic novels.New Yorker.vf2012-12-27aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01631cam a22004214a 4500ocn759908528OCoLC20121228152849.0111213s2012 nyua e b 001 0 eng 2011046776016102620Uk9780307587107 (alk. paper)030758710X (alk. paper)9780307587114 (ebook)0307587118 (ebook)(OCoLC)759908528DLCengDLCYDXBTCTABDXYDXCPUKMGBOCLCOBWXCZACDXNTGUtOrBLWpccNTGAHF5439.H27R37 2012745.506823745.5068 RANRand, Kelly,1979-Handmade to sell :Hello Craft's guide to owning, running, and growing your crafty biz /by Kelly Rand ; with Christine Ernest ... [et al.] ; illustrations by Jaime Zollars.1st ed.New York :Potter Craft,c2012.175 p. :ill. ;21 cm.Includes bibliographical references and index.An all-encompassing guide to starting and running a successful craft business draws on the expertise of the well-known nonprofit trade organization and provides authoritative coverage of everything from developing successful product lives and preparing taxes to forming LLCs.SellingHandicraft.HandicraftMarketing.Small businessManagement.Ernest, Christine.Zollars, Jaime.vf2012-12-27aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01622cam a2200409Ia 4500ocn751834983OCoLC20121228205355.0110823s2011 ohua e 001 0 eng dGBB1A5955bnb015885121Uk7072124977600823677946926829781440315244 (hbk.)1440315248 (hbk.)(OCoLC)751834983(OCoLC)707212497(OCoLC)760082367(OCoLC)794692682UKMGBengUKMGBWIQHCOYDXCPBKXQBXTOHBDXUKWOHBTCTABWXNTGUtOrBLWNTGAND1351.6.A77 2011ARTeflch758.173092223758.173092 ARTArt journey America :landscapes : 89 painters' perspectives /edited by Kathy Kipp.Landscapes :89 painters' perspectives1st ed.Cincinnati, Ohio :North Light Books,c2011.192 p. :col. ill. 
;31 cm.Includes index.Showcasing the work of 100 top contemporary American master artists of our day, this book features landscapes from all across the country - east and west, north and south - rendered in watercolour, oil, acrylic, pastel and mixed media.Landscape painting, American20th century.Landscape painting, American21st century.Landscape paintersUnited StatesInterviews.Kipp, Kathryn,1946-North Light Books (Firm)csr2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01323cam a2200385Ia 4500ocm58602313 OCoLC20130215212952.0jstn r sd fungnnmm|||050223s2004 cauag e 000 0 eng d9780739036358073903635103808123297323197Alfred Pub. Co.23198Alfred Pub. Co.(OCoLC)58602313RBNRBNWTXMDYBAKERBTCTAORXBDXYDXCPNTGUtOrBLWNTGAMT7.S87A44x 2004781.2SURMANI781.2 SURSurmani, Andrew.Alfred's essentials of music theory :a complete self-study course for all musicians /Andrew Surmani, Karen Farnum Surmani, Morty Manus.Essentials of music theoryVan Nuys, CA :Alfred Pub. Co.,c2004.151 p. :ill., music ;30 cm. +2 sound discs (digital ; 4 3/4 in.)"For pianists, guitarists, instrumentalists, vocalists, songwriters, arrangers & composers"--Cover.Music theorySelf-instruction.Surmani, Karen Farnum.Manus, Morton.csr2013-02-15aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01208cam a2200325 a 4500ocn798059648OCoLC20121228154005.0121109s2012 cau e b 001 0 eng 20120409049781593765118 (pbk.)1593765118 (pbk.)(OCoLC)798059648DLCengDLCIG#BTCTABDXYDXCPOCLCOBWXNTGUtOrBLWNTGAPN1997.P793R43 2012791.43/7223791.43023 REBRebello, Stephen.Alfred Hitchcock and the making of Psycho /Stephen Rebello.Berkeley, CA :Soft Skull Press,c2012.288 p. ;23 cm.Includes bibliographical references (p. [269]-273) and index.The awful truth --The novel --The director --The deal --The screenplays --Preproduction --Shooting --Postproduction --Publicity --The release --Afterglow and aftermath.Psycho (Motion picture : 1960)Hitchcock, Alfred,1899-1980Criticism and interpretation.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02267cam a2200349 a 4500ocn776526126OCoLC20121228182145.0120503s2012 nyua e 000 0ceng 2012016146016131546Uk97814555161311455516139(OCoLC)776526126DLCengDLCYDXBTCTABDXYDXCPUKMGBOCLCOWIHRCJZAGCDXQBXCGPNTGUtOrBLWNTGAGV697.A1J478 2012796.08992423796.089924 JEWJewish jocks :an unorthodox hall of fame /edited by Franklin Foer and Marc Tracy.1st ed.New York :Twelve,2012.xiv, 285 p. :ill. ;24 cm.With contributions from celebrated Jewish writers including David Remnick, Jonathan Safran Foer, and Dahlia Lithwick, provides an overview of the most influential Jewish figures in sports, from Howard Cosell to Sandy Koufax.Daniel Mendoza : the king's pugilist /Simon Schama --Max Nordau : philosopher of the muscle Jews /Timothy Snyder --Barney Sedran : tiny baller /Rebecca Newberger Goldstein --Benny Leonard : Mama said knock you out /Franklin Foer --Mose Solomon : the hunt for the Hebrew Ruth /Robert Weintraub --Whitey Bimstein : cutman /Douglas Century --Sidney Franklin : matador from Flatbush /Tom Rachman --Arnold Rothstein : American Shylock /Ron Rosenbaum --Barney Ross : Kaddish for a welterweight /Buzz Bissinger --Marty Reisman : ping-pong wizard /Howard Jacobson --Hank Greenberg : the plot against Greenberg? 
/Ira Berkow --Helene Mayer : fencing for Hitler /Joshua Cohen --Al Rosen : I'm not Greenberg /David Margolick --Sid Luckman : Hebrew mind, cossack body /Rich Cohen --Grigory Novak : Soviet strongman /David Bezmozgis --Jack Molinas : the point-shaver /Chad Millman --Dolph Schayes : power forward /Marc Tracy --Red Auerbach : the coach who never paid retail /Steven PinkerJewish athletesBiography.Foer, Franklin.Tracy, Marc.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01050cam a2200301Ia 4500ocn820193187OCoLC20130614123745.0121204s2012 cau e b 000 0 eng d9780983632948 (pbk.)0983632944 (pbk.)(OCoLC)820193187GO6GO6GO6NTGUtOrBLWNTGA808.3808.3 LOWLowenkopf, Shelly.The fiction writer's handbook :[the definitive guide to McGuffins, red herrings, shaggy dogs, and other literary revelations from a master] /Shelly Lowenkopf ; foreword by Christopher Moore.Los Angeles, CA :White Whisker Books,2012.334 p. ;23 cm.Includes bibliographical references (p. 329-333).Short entries describing the terms and processes used in writing fiction.FictionTechnique.Creative writing.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02291cam a2200385 a 4500ocn793911008OCoLC20130614123745.0120511s2012 arua e b 000 0 eng 2012018195016230529Uk1935106503 (hardcover : alk. paper)9781935106500 (hardcover : alk. paper)(OCoLC)793911008DLCengDLCYDXBTCTAYDXCPBWXBDXCDXORXKCPIXAUKMGBVP@UtOrBLWpccNTGAPS3566.O663A6 2012813/.5423818.54 PORPortis, Charles.Works.Selections.2012Escape velocity :a Charles Portis miscellany /edited and with an introduction by Jay Jennings ; cover art and illustrations by Mike Reddy.Little Rock, Ark. :Butler Center for Arkansas Studies,c2012.xxii, 358 p. :ill. ;24 cm.Includes bibliographical references.Elected newspaper reporting and writing. Memphis commercial appeal(1958) --Arkansas gazette(1959-1960) --New York herald tribune(1960-1964) --General assignment --Rights reporting --London bureau --Travels --That new sound from Nashville --An auto odyssey through darkest Baja --The forgotten river --Motel life, lower reaches --Short stories. Your action line --Nights can turn cool in Viborra --I don't talk service no more --The wind bloweth where it listeth --Memoir. Combinations of Jacksons --Drama. Delray's new moon --Epilogue. interview[s] --Gazette project interview (by Roy Reed) --Tributes. Comedy in earnest (by Roy Blount Jr.) --Like Cormac McCarthy, but funny (by Ed Park) --Our least-known great novelist (by Ron Rosenbaum) --Afterword to True grit (by Donna Tartt) --The book that changed my life: Gringos (by Wells Tower).Brings together Portis' writings other than his four novels, including journalism, travel stories, short fiction, memoir, and even a play.Portis, CharlesAppreciation.Jennings, Jay,1957-Reddy, Mike.lmc2012-12-21aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01306cam a2200349 a 4500ocn777660045OCoLC20121228181933.0120529s2012 nyua e b 000 0 eng 20120216299781590515662 (hc. : acid-free paper)1590515668 (hc. : acid-free paper)9781590515679 (ebook)1590515676 (ebook)(OCoLC)777660045DLCengDLCYDXBTCTABDXOCLCOYDXCPOPWNTGUtOrBLWNTGAPQ2631.R63Z78925 2012843/.91223843.912 MUHMuhlstein, Anka.Monsieur Proust's library /Anka Muhlstein.New York :Other Press,c2012.xiv, 141 p. :ill. 
;22 cm.Includes bibliographical references.First impressions and lasting influences --Foreign incursions --Good readers and bad readers --A homosexual reader: Baron de Charlus --Racine: a second language --The Goncourts --Bergotte: the writer in the novel.Proust, Marcel,1871-1922Books and reading.Proust, Marcel,1871-1922Characters.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02410cam a2200433Ia 4500ocn783162529OCoLC20130614123745.0120330s2012 enkab e b 001 0 eng d 2011945993016122239Uk8132225789780500051733 (cloth)0500051739 (cloth)(OCoLC)783162529(OCoLC)813222578BTCTAengBTCTABDXUKMGBYDXCPBWKYNKHLSOCLCOHHOCDXORXAU@BWXJCUOCLCOYYPCUTMUUNTGUtOrBLWf-ua---NTGADT73.T25K46 2012932.01423932.014 KEMKemp, Barry J.The city of Akhenaten and Nefertiti :Amarna and its people /Barry Kemp.London :Thames & Hudson,c2012.320 p. :ill. (some col.), maps ;26 cm.New aspects of antiquityIncludes bibliographical references (p. 306-313) and index.The ancient site of Tell el-Amarna in Middle Egypt was the capital city of the heretic pharaoh Akhenaten and his chief consort, Nefertiti. Occupied for just sixteen or so years in the fourteenth century BC, the city lay largely abandoned and forgotten until excavations over the last hundred years brought it back into prominence. Based on more than three decades of research and excavation by Barry Kemp, this account provides new insight into Amarna and its people.The author brings to life the royal family and their offspring, including Tutankhamun, as well as prominent citizens such as the high priest Panehsy, the vizier Nakht, the general Ramose, and the sculptor Thutmose.The Cast of characters --City of the horizon --Building a vision --Akhenaten's resources --The city of the Sun-God --The apartments of the Pharaoh --City of people --The quality of life --Spiritual life at Amarna --What kinds of city? --An end and a beginning.Tell el-Amarna (Egypt)History.Akhenaton,King of Egypt.Nefertiti,Queen of Egypt,active 14th century B.C.PharaohsBiography.EgyptHistoryEighteenth dynasty, ca. 1570-1320 B.C.New aspects of antiquity.csr2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01890cam a2200457 a 4500ocn778636572OCoLC20121228201118.0120213s2012 nyu e b 001 0 eng 2012004336016167579Uk8099360499780199916986 (alk. paper)0199916985 (alk. paper)9780199916993 (ebk.)0199916993 (ebk.)(OCoLC)778636572(OCoLC)809936049DLCengDLCYDXBTCTAYDXCPOCLCOUKMGBBWXCUTVVCCOONTGUtOrBLWpcca-is---awba---NTGAJZ5540.G67 2012956.05092223956.050922 GOPGopin, Marc.Bridges across an impossible divide :the inner lives of Arab and Jewish peacemakers /Marc Gopin.New York :Oxford University Press,c2012.ix, 241 p. ;22 cm.Includes bibliographical references and index.Pt. 1.Self examination and identity. Ibtisam Mahameed: an introductory Palestinian case study. Eliyahu McLean: an introductory Israeli Jewish case study --pt. 2.Peacemakers in their own words. Sheikh Abdul Aziz Bukhari. Gabriel Meyer. Ihab Balha. Hind Kabawat. Marc Gopin --pt. 3.The inner life of peacemakers. A summary of the peacemakers' transformative qualities. 
The inner life of the peacemaker and the future of global changePeace-buildingIsrael.Peace-buildingPalestine.Reconciliation.Conflict management.CommunicationSocial aspects.Arab-Israeli conflict1993-Peace.IsraelEthnic relations.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01996cam a2200361 a 4500ocn778074340OCoLC20121228202404.0120222s2012 nyua e b 000 0deng 20120049979780374298807 (hbk.)0374298807 (hbk.)(OCoLC)778074340DLCengDLCIG#BTCTABDXUKMGBOCLCOJAIYDXCPBURCDXZQPBWXNTGUtOrBLWNTGAPR4582.G88 2012823/.823B DICKENSGottlieb, Robert,1931-Great expectations :the sons and daughters of Charles Dickens /Robert Gottlieb.Sons and daughters of Charles Dickens1st ed.New York :Farrar, Straus and Giroux,2012.239 p. :ill. ;22 cm.Includes bibliographical references (p. [243-244]).Charles Dickens, famous for the indelible child characters he created--from Little Nell to Oliver Twist and David Copperfield--was also the father of ten children (and a possible eleventh). What happened to those children is the fascinating subject of Robert Gottlieb's Great Expectations. With sympathy and understanding he narrates the highly various and surprising stories of each of Dickens's sons and daughters, from Kate, who became a successful artist, to Frank, who died in Moline, Illinois, after serving a grim stretch in the Royal Canadian Mounted Police. Each of these lives is fascinating on its own; together they comprise a unique window on Victorian England as well as a moving and disturbing study of Dickens as a father and as a man.--From publisher description.Dickens, Charles,1812-1870Family.Children of authorsEngland19th centuryBiography.Authors, English19th centuryFamily relationships.csr2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01636cam a2200457 i 4500ocn818985184OCoLC20121228085108.0121115t20122012miuaf e 000 0aeng 20120423898110071489780310740612 (hardcover)0310740614 (hardcover)(OCoLC)818985184(OCoLC)811007148DLCengrdaDLCOCLCOOEMBTCTAZQPUPZOPWPCXYDXCPTXANTGUtOrBLWpccn-us---NTGAGV460.2.D68A3 2012796.44092B23B DOUGLASDouglas, Gabrielle,1995-Grace, gold & glory :my leap of faith /Gabrielle Douglas ; with Michelle Burford.Grace, gold and gloryGrand Rapids, Michigan :Zondervan,[2012]©2012222 pages, [8] pages of plates :color illustrations ;24 cmtextrdacontentunmediatedrdamediavolumerdacarrierThe U.S. gymnast all-around gold medal winner at the 2012 London Olympics tells her story of faith, perseverance, and determination.Douglas, Gabrielle,1995-Women gymnastsUnited StatesBiography.Women Olympic athletesUnited StatesBiography.GymnastsUnited StatesBiography.Olympic athletesUnited StatesBiography.Burford, Michelle,author.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00880nam a2200301Ka 4500ocn814290313OCoLC20130614123745.0121024s2013 flua e 001 0 eng d9781618100825 (hc)1618100823 (hc)(OCoLC)814290313APLAPLNTGUtOrBLWNTGA613.043223E613.0432 CLECleland, Jo.Clean teeth, dirty teeth /Jo Cleland.Vero Beach, Fla. :Rourke Educational Media,c2013.24 p. :col. ill. 
;21 cm.Healthy habitsIncludes bibliographical references (p.24) and index.ChildrenHealth and hygiene.Sing and read.Healthy habits.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00862nam a2200301Ka 4500ocn814290240OCoLC20130614123745.0121024s2013 flua e 001 0 eng d9781618100818 (hc)1618100815 (hc)(OCoLC)814290240APLAPLNTGUtOrBLWNTGA613.043223E613.0432 CLECleland, Jo.Achoo! /Jo Cleland.Vero Beach, Fla. :Rourke Educational Media,c2013.24 p. :col. ill. ;21 cm.Healthy habitsIncludes bibliographical references (p.24) and index.ChildrenHealth and hygiene.Sing and read.Healthy habits.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00880nam a2200301Ka 4500ocn814290314OCoLC20130614123745.0121024s2013 flua e 001 0 eng d9781618100801 (hc)1618100807 (hc)(OCoLC)814290314APLAPLNTGUtOrBLWNTGA613.043223E613.0432 CLECleland, Jo.Clean hands, dirty hands /Jo Cleland.Vero Beach, Fla. :Rourke Educational Media,c2013.24 p. :col. ill. ;21 cm.Healthy habitsIncludes bibliographical references (p.24) and index.ChildrenHealth and hygiene.Sing and read.Healthy habits.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01308cam a2200361Ia 4500ocn806521671OCoLC20130614123745.0120817s2012 nvu e 000 1 eng d1612184227 (pbk.)9781612184227 (pbk.)(OCoLC)806521671YDXCPYDXCPBTCTABDXOEICPPNTGUtOrBLWNTGAPS3601.N55524I36 2012ANNECHIFIC ANNECHINOAnnechino, Daniel M.I do solemnly swear /D. M. Annechino.Las Vegas, NV :Thomas & Mercer,c2012.291 p. ;21 cm.As second in command, Katherine Ann Miles understands the responsibilities of her role as vice president. But when the president of the United States dies from a heart attack only eight months into his term, Katherine feels utterly unprepared to assume the highest office in the land.PresidentsFiction.AssassinsFiction.ConspiraciesFiction.TerrorismPreventionFiction.Washington (D.C.)Fiction.Political fiction.Suspense fiction.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01931cam a22003618a 4500ocn795168262OCoLC20121228202119.0121017s2013 nyu e 000 1 eng 201203911697804252528400425252841(OCoLC)795168262DLCengDLCBTCTAGO9IUKCXPNTGUtOrBLWn-us-caNTGAPS3556.O828R63 2013813/.5423FIC FOWLERFowler, Earlene.The road to Cardinal Valley /Earlene Fowler.1st ed.New York :Berkley Prime Crime,2013.viii, 291 p. ;24 cm."Ruby never thought she'd return to Cardinal, but she's hoping the place and people who gave her so much can give her brother Nash-who's been drowning in drink in Nashville-the fresh start he so desperately needs. Saddlemaker Lucas McGavin is thrilled that Ruby has come back. He hasn't given up on his love for her, despite the awkward fact that she is his brother's widow, and he's well aware that this may be his last chance to win Ruby's heart. When Nash starts drinking again and ends up in a devastating accident, Ruby decides she must find her estranged mother to help with an intervention. Two states away, Etta Walker harbors a horrible secret that keeps her from reconnecting with the children she deserted so many years ago. 
As they struggle with the present and confront the past, Ruby, Lucas, and Etta learn the power of forgiveness...and reach for a new future filled with hope, grace, and love."--Dust jacket.WidowsFiction.FamiliesCaliforniaFiction.Ranch lifeCaliforniaFiction.Sierra Nevada (Calif. and Nev.)Fiction.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01951cam a22004938a 4500ocn785077565OCoLC20121228115603.0121010s2012 nyu e 000 1 eng 201203774797803991606600399160663(OCoLC)785077565DLCengDLCBTCTABDXYDXCPOCLCOUPZGK5IUKNSBNTGUtOrBLWn-us---NTGAPS3557.R489137E47 2012813/.5423FIC GRIFFINGriffin, W. E. B.Empire and honor /W.E.B. Griffin and William E. Butterworth IV.New York :G. P. Putnam's Sons,c2012.517 p. ;24 cm.Honor bound series ;7In the aftermath of the surrenders of Germany and Japan in October 1945, Cletus Frade and his colleagues in the OSS are given the life-threatening task of maintaining security during a covert U.S. deal with Germany for intelligence about the identities of Soviet spies in the American atomic bomb program.United States.Office of Strategic ServicesFiction.Intelligence officersUnited StatesFiction.Frade, Cletus (Fictitious character)Fiction.World War, 1939-1945Secret serviceUnited StatesFiction.World War, 1939-1945Secret serviceSoviet UnionFiction.Atomic bombFiction.World War, 1939-1945Fiction.Spy stories.gsafdSuspense fiction.gsafdHistorical fiction.gsafdSpy stories.Suspense fiction.Historical fiction.Butterworth, William E.(William Edmund)Griffin, W. E. B.Honor bound ;7.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01356cam a2200397Ma 4500ocn801606122OCoLC20130614123746.0120710s2012 enk e 000 f eng d016127359Uk016147236Uk9781846140495 (hbk.)1846140498 (hbk.)(OCoLC)801606122UKMGBengUKMGBOCLCOCDXPZIMLYOCLCOYDXCPNTGUtOrBLWengfreNTGAPQ843.723FIC HUGOHugo, Victor,1802-1885.Les misérables /Victor Hugo ; translated and introduced by Norman Denny.London :Penguin Classics,2012.1,231 p. ;21 cm.Translated from the French.First published in 1862.Story of Valjean, the ex-convict who rises against all odds from galley slave to mayor, and the fanatical police inspector who dedicates his life to recapturing Valjean.Ex-convictsFiction.OrphansFiction.FranceHistory19th centuryFiction.FranceHistoryJuly Revolution, 1830Fiction.Paris (France)Fiction.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02029cam a22003618a 4500ocn796755120OCoLC20121228195549.0120918s2012 cau e 000 1 eng 20120356599781596923836 (hardcover)1596923830 (hardcover)(OCoLC)796755120DLCengDLCBTCTAYDXCPOCLCOBDXOCPABGCDXNTGUtOrBLWNTGAPS3611.O3643P48 2012813/.623FIC KOENIGSDORFKoenigsdorf, Jill.Phoebe & the ghost of Chagall :a novel /by Jill Koenigsdorf.Phoebe and the ghost of ChagallSan Francisco, CA :MacAdam/Cage,c2012.356 p. ;24 cm.Phoebe is an artist making very little money designing wine labels for a winery in Sonoma. Her house is in foreclosure, she's divorced, turning forty, and beleaguered on every front. Enter Marc Chagall s ghost, visible only to her, who appears to help her retrieve one of his own paintings that Phoebe's father found during the liberation of France. Meant for Phoebe and her mother, the painting never made it into their hands. 
In this debut comic novel, Phoebe and Chagall hunt down the painting in the South of France with help from a cast of characters including two sisters who are witches, a San Francisco Art dealer, and a misguided French innkeeper. Their snooping also leads Chagall to a few out of the hundred paintings that went missing during his lifetime. With skill and tension this book pits characters who appreciate art for its beauty against black market art dealers, evil collectors, and the mysterious German pawn hired to deliver the goods.Women artistsFiction.Ghost stories.gsafdHumorous fiction.gsafdGhost stories.Humorous fiction.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01411cam a2200373Ia 4500ocn467179667OCoLC20121228021541.0091119r20092003nyu e 000 1 eng d32018722497805535934190553593412(OCoLC)467179667(OCoLC)320187224IMFIMFBTCTAIHVORXBDXYDDNTGUtOrBLWn-us-caNTGAPS3561.O55F33 2009813/.5423FIC KOONTZKoontz, Dean R.(Dean Ray),1945-The face :a novel /Dean Koontz.Bantam Mass Market ed.New York :Bantam Books,2009, c2003.649 p. ;20 cm."Originally published in hardcover in the United States by Bantam Books in 2003"--T.p. verso.A riveting tour de force of suspense, mystery, and miraculous revelation, The Face is that rare novel that entertains, provokes, and uplifts at the same time. It will make you laugh, It will give you chills, It will fill you with hope.Motion picture actors and actressesFiction.Hollywood (Los Angeles, Calif.)Fiction.Private security servicesFiction.Stalking victimsFiction.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01267cam a2200397Ia 4500ocn793220360OCoLC20130614123746.0120501s2012 enk e 000 1 eng d9780857662781 (pbk.)0857662783 (pbk.)(OCoLC)793220360BTCTAengBTCTABDXYDXCPIEPCO2NTGUtOrBLWNTGAPR6112.Y519M47 2012823/.9223FIC LYLELyle, Anne.The merchant of dreams /Anne Lyle.Nottingham, [England] :Angry Robot,2012.522 p. ;18 cm.Night's masque ;vol. IIIn this sequel to The Alchemist of Souls, a group of renegades cause a rift among the Skraylings.AssassinsFiction.Imaginary societiesFiction.BodyguardsFiction.MagicFiction.EnglandSocial life and customs16th centuryFiction.Great BritainFiction.Historical fiction.Fantasy fiction.Lyle, Anne.Night's masque ;. 2.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01107cam a2200361Ia 4500ocn807034417OCoLC20121228194114.0120816s2012 nvu e 000 1 eng d 20129110539781612184319 (pbk.)1612184316 (pbk.)(OCoLC)807034417BTCTAengBTCTAYDXCPBDXJBUORXIG$NTGUtOrBLWengicee-ic---NTGA839/.693423FIC RAGNA SIGURDARDOTTIRRagna Sigurðardóttir.Hið Fullkomma landslag.EnglishThe perfect landscape /Ragna Sigurdardottir ; translated by Sarah Bowen.Las Vegas, NV :AmazonCrossing,2012.213 p. ;21 cm.Translation of: Hið Fullkomma landslag.Art museumsIcelandFiction.ArtForgeriesFiction.IcelandFiction.Bowen, Sarah,1957-trlAMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00851cam a2200313Ia 4500ocn693705050OCoLC20121228161942.0101217s2010 inu e 000 f eng d69370505114502578019781450257800145025781X9781450257817(OCoLC)693705050(OCoLC)693705051YDXCPengYDXCPOCLCQBDXJBUNTGUtOrBLWNTGAFIC WILCOXWilcox, C. E.A reluctant assassin /C. E. Wilcox.Bloomington, IN :iUniverse,2010.251 p. 
;24 cm.MarinesFiction.AssassinsFiction.Organized crimeFiction.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01198cam a2200325Ma 4500ocn812070879OCoLC20121228220145.0120926s2013 nyu e 000 f eng d016181843Uk97807582784320758278438(OCoLC)812070879UKMGBengUKMGBOCLCOZS3LEBILCSRCNTGUtOrBLWe-gx---NTGAPS3623.I832P58 2013813.623FIC WISEMANWiseman, Ellen Marie.The plum tree /Ellen Marie Wiseman.New York :Kensington Books,c2013.387 p. ;21 cm.In the fall of 1938, as Germany rapidly changes under Hitler's regime, 17-year-old Christine Bolz, a domestic forbidden to return to the wealthy Jewish family she works for - and to her employer's son Isaac, confronts the Gestapo's wrath and the horrors of Dachau to survive and to be with the man she loves.GermanyHistory1933-1945Fiction.Historical fiction.gsafdJL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01898cam a2200529 a 4500ocn773667632OCoLC20130614123746.0120215s2012 nyua c 000 1 eng 20110519819780375867415 (trade)0375867414 (trade)9780375967412 (library binding)0375967419 (library binding)9780375864957 (trade paperback)0375864954 (trade paperback)9780375897900 (ebook)0375897909 (ebook)(OCoLC)773667632DLCengDLCBTCTABDXYDXCPWIQVP@NTGUtOrBLWpcclcacNTGAPZ7.B38823495Red 2012[Fic]23J BEILBeil, Michael D.The Red Blazer Girls :the secret cellar /Michael D. Beil.Secret cellar1st ed.New York :Alfred A. Knopf,c2012.274 p. :ill. ;22 cm.Red Blazer Girls ;[bk. 4]When Sophie finds a secret message in the antique fountain pen she bought for her father, she and her friends become involved in a treasure hunt devised by the pen's previous owner, whose house is full of puzzles that protect a hidden treasure.Mystery and detective stories.PuzzlesFiction.Buried treasureFiction.Eccentrics and eccentricitiesJuvenile fiction.ChristmasFiction.Catholic schoolsJuvenile fiction.SchoolsFiction.PuzzlesJuvenile fiction.Treasure trovesJuvenile fiction.Beil, Michael D.Red Blazer Girls ;bk. 4.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01648cam a22004098a 4500ocn766606333OCoLC20121228094822.0111201s2012 nyu c 000 1 eng 20110401329780761462279 (hardcover)0761462279 (hardcover)9780761462286 (ebook)0761462287 (ebook)(OCoLC)766606333DLCengDLCBDXIUKJP3NTGUtOrBLWlcacpccNTGAPZ7.B56513Mar 2012[Fic]23J BLAKEBlake, Stephanie(Stephanie J.),1969-The Marble Queen /Stephanie J. Blake.1st ed.New York :Marshall Cavendish Children,c2012.178 p. 
;22 cm.Freedom Jane McKenzie does not like following rules, especially about what girls should do, but what she wants most of all is to enter and win the marble competition at the Autumn Jubilee to prove herself worthy of the title, Marble Queen.In 1959, ten-year-old Freedom Jane McKenzie wants to enter and win the marble competition at the Autumn Jubilee and win the title of Marble Queen, but first she must convince Mama that competing with boys is okay.Marbles (Game)Juvenile fiction.FamiliesIdahoJuvenile fiction.Sex roleJuvenile fiction.ContestsJuvenile fiction.IdahoHistory20th centuryJuvenile fiction.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01350cam a2200385Ia 4500ocn795173674OCoLC20121228093701.0120610s2012 nvua c 000 1 eng d97807614632690761463267(OCoLC)795173674BTCTAengBTCTABDXCO2OCLCOL@LNTGUtOrBLWNTGAPZ7.B642534Sp 2012[Fic]23J BONNETT-RAMPERSAUDBonnett-Rampersaud, Louise.The spring un-fair /by Louise Bonnett-Rampersaud ; pictures by Adam McHeffey.Spring unfair1st ed.Las Vegas, Nev. :Amazon Children's Publishing,c2012.111 p. :ill. ;22 cm.Secret knock club ;#2Agnes and the other members of the Secret Knock Club decide to form a rock band and perform at a concert to raise funds to rent a dunk tank for the spring fair.FairsJuvenile fiction.Rock groupsJuvenile fiction.ClubsJuvenile fiction.SchoolsJuvenile fiction.McHeffey, Adam James.Bonnett-Rampersaud, Louise.Secret knock club ;2.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01421cam a2200397 a 4500ocn781555963OCoLC20130614123746.0120322s2012 miub c 000 1 eng 20120058639781585368006 (hard cover)1585368008 (hard cover)(OCoLC)781555963DLCengDLCIG#BTCTABDXIK2UPZBKXQBXYDXCPNTGUtOrBLWlcacpccNTGAPZ7.D1644Qu 2012[Fic]23J DALLASDallas, Sandra.The quilt walk /by Sandra Dallas.Ann Arbor, MI :Sleeping Bear Press,c2012.213 p. :map ;21 cm."Based on a story in The quilt that walked to Golden"--T.p. verso.Ages 9 and up.Ten-year-old Emmy Blue learns the true meaning of friendship--and how to quilt--while making a harrowing wagon journey from Illinois to Colorado with her family in the 1860s.Wagon trainsJuvenile fiction.Frontier and pioneer lifeJuvenile fiction.QuiltingJuvenile fiction.FriendshipFiction.FriendshipJuvenile fiction.Dallas, Sandra.Quilt that walked to Golden.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01645cam a2200421 a 4500ocn776523388OCoLC20130614123746.0120821s2012 nyu c 000 1 eng 2012028738016126503Uk9780316205962 (hbk.)0316205966 (hbk.)(OCoLC)776523388DLCengDLCIG#BTCTABDXUKMGBOCOB@LOCLCONTGUtOrBLWNTGAPZ7.S456922Smp 2012[Fic]23J SELFORSSelfors, Suzanne.Smells like pirates /by Suzanne Selfors.1st ed.New York :Little, Brown,2012.369 p. ;21 cm.Smells like dog"Homer thought membership in L.O.S.T., the mysterious Society of Legends, Objects, Secrets, and Treasures, would help him find pirate Rumpold Smeller's missing treasure. 
But when Homer's enemy, Lorelei, forms an evil organization called FOUND, Homer and Dog face an impossible decision: Work with Lorelei to find the prize once and for all, or abandon their lifelong quest to locate the treasure"--Provided by publisher.Adventure and adventurersFiction.DogsFiction.PiratesJuvenile fiction.Secret societiesJuvenile fiction.Mystery and detective stories.Adventure stories.DogsJuvenile fiction.Selfors, Suzanne.Smells like Dog.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02025cam a2200457 a 4500ocn709407560OCoLC20130614123746.0110223s2012 mnuab c b 000 1 eng 20110051139780761370901 (lib. bdg. : alk. paper)0761370900 (lib. bdg. : alk. paper)(OCoLC)709407560DLCengDLCIG#BTCTABDXYDXCPMLYNTGUtOrBLWlcacpccNTGAPZ7.7.S46Shh 2012741.5/97322J SHAWShaw, Murray.Sherlock Holmes and the adventure of the cardboard box /based on the stories of Sir Arthur Conan Doyle ; adapted by Murray Shaw and M.J. Cosson ; illustrated by Sophie Rohrbach and JT Morrow.Minneapolis :Graphic Universe,c2012.48 p. :chiefly col. ill., col. map ;25 cm.On the case with Holmes and Watson ;#12Includes bibliographical references (p. 47).Retold in graphic novel form, Sherlock Holmes investigates when a spinster receives a package in the mail containing two severed ears. Includes a section explaining Holmes's reasoning and the clues he used to solve the mystery.Doyle, Arthur Conan,1859-1930.Adventure of the cardboard boxAdaptations.Holmes, Sherlock (Fictitious character)Comic books, strips, etc.Watson, John H. (Fictitious character)Comic books, strips, etc.Detective and mystery comic books, strips, etc.Graphic novels.Comic books, strips, etc.Cosson, M. J.Rohrbach, Sophie,ill.Morrow, J. T.,ill.Doyle, Arthur Conan,1859-1930.Adventure of the cardboard box.Shaw, Murray.On the case with Holmes and Watson ;#12.slh 20130111aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01172cam a2200349 a 4500ocn728102161OCoLC20121228202310.0110528s2012 mnuab c b 001 0 eng 20110210189781429676458 (library binding)1429676450 (library binding)(OCoLC)728102161DLCengDLCBTCTABDXHBPOCLCONTGUtOrBLWpccNTGAQL737.C23D66 2012599.75/5523J599.7555 DORDorisi-Winget, Dianna.Snow leopards /by Dianna Dorisi-Winget ; consultant, Christina Simmons.Mankato, Minn. :Capstone Press,c2012.32 p. :col. ill., col. map ;24 cm.Edge books. Big catsIncludes bibliographical references (p. 31) and index.Explores the habitat, life cycle, physical characteristics, and behavior of snow leopards.Snow leopardJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01172cam a2200361 a 4500ocn727357085OCoLC20130614123746.0110524s2012 mnuab c b 001 0 eng 20110210167811357099781429676465 (library binding : alk. paper)1429676469 (library binding : alk. paper)(OCoLC)727357085(OCoLC)781135709DLCengDLCBTCTABDXHBPOCLCONTGUtOrBLWpccNTGAQL737.C23H44 2012599.75623J599.756 HEGHegel, Claudette.Tigers /by Claudette Hegel Edge Books.Mankato, Minn. :Capstone,2012.32 p. :col. ill., col. 
map ;24 cm.Edge books: big catsIncludes bibliographical references and index.Explores the habitat, life cycle, physical characteristics, and behavior of tigers.TigerJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01084cam a2200349 a 4500ocn708357943OCoLC20121228200031.0110321s2012 mnua c b 001 0 eng 20110108309781429676434 (library binding)1429676434 (library binding)(OCoLC)708357943DLCengDLCBTCTABDXQBXHBPOCLCONTGUtOrBLWpccNTGAQL737.C23G347 2012599.75722J599.757 GAGGagne, Tammy.Lions /by Tammy Gagne.Mankato, Minn. :Capstone,2012.32 p. :col. ill. ;24 cm.Edge books. Big catsIncludes bibliographical references (p. 31) and index.Explores the habitat, life cycle, physical characteristics, and behavior of lions.LionJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01098cam a2200349 a 4500ocn708243787OCoLC20130614123746.0110321s2012 mnuab c b 001 0 eng 20110108259781429676410 (library binding)1429676418 (library binding)(OCoLC)708243787DLCengDLCBTCTABDXGO3OCLCONTGUtOrBLWpccNTGAQL737.C23G34 2012599.75/922J599.759 GAGGagne, Tammy.Cheetahs /by Tammy Gagne.Mankato, Minn. :Capstone,2012.32 p. :col. ill., col. map ;24 cm.Edge books. Big catsIncludes bibliographical references (p. 31) and index.Explores the habitat, life cycle, physical characteristics, and behavior of cheetahs.CheetahJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01957cam a2200481Ia 4500ocn796754518OCoLC20130614123746.0120626s2012 nyua c 000 0 eng 9780345525871 (pbk.)0345525876 (pbk.)(OCoLC)796754518OWLOWLBKXNTGUtOrBLWNTGAPN6728.G28D3948 2012741.5/6/97323J741.56973 DAVDavis, Jim,1945 July 28-Garfield.SelectionsGarfield takes his licks /by Jim Davis.Ballantine Books trade pbk. ed., 1st colorized ed.New York :Ballantine,2012.94 p. :chiefly col. ill. ;23 cm.Garfield classics ;24"Originally published in slightly different form in the United States by Ballantine Books ... in 1993"--T.p. verso."His 24th book"--Cover.Colorized versions of Garfield comic strips, featuring the adventures of Garfield the cat, Odie the dog, and their owner Jon.Garfield (Fictitious character)Comic books, strips, etc.Juvenile fiction.CatsComic books, strips, etc.Juvenile fiction.Human-animal relationshipsComic books, strips, etc.Juvenile fiction.Garfield (Fictitious character)Juvenile fiction.CatsFiction.Human-animal relationshipsJuvenile fiction.Humorous storiesJuvenile literature.Cartoons and comics.Humorous fiction.gsafdComic books, strips, etc.gsafdDavis, Jim,1945 July 28-Garfield classics ;24.Publisher descriptionhttp://catdir.loc.gov/catdir/description/random046/92090384.htmlDDG 20120801aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01533cam a2200421 i 4500ocn772611097OCoLC20130614123746.0120105s2013 nyua c 000 0 eng 20120004659781619132412 (hbk. : alk. paper)1619132419 (hbk. : alk. paper)9781619132467 (pbk. : alk. paper)161913246X (pbk. : alk. 
paper)(OCoLC)772611097DLCrdaengDLCIG#YDXCPOCLCOIHINTGUtOrBLWNTGANC783.I57 2012743.6/5723J743.657 INSInsects /[senior editor, Heather Kissock].Learn to draw insectsNew York, NY :AV2 by Weigl,[2013]32 pages :color illustrations ;26 cm.textrdacontentunmediatedrdamediavolumerdacarrierLearn to drawWhy draw? --Insects --Meet the ant --Meet the butterfly --Meet the dragonfly --Meet the firefly --Meet the grasshopper --Meet the praying mantis --Test your knowledge of insects --Draw an environment.Insects in artJuvenile literature.DrawingTechniqueJuvenile literature.Insects in art.Juvenile literature.Kissock, Heather.Learn to draw (New York, N.Y.)jab2012-12-27aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01759cam a2200373Ia 4500ocn793222870OCoLC20130614123746.0120503s2012 nvu e 000 1 eng d97816121843711612184375(OCoLC)793222870BTCTAengBTCTABDXHQBMR0YDXCPOCPIZ8NTGUtOrBLWe-fi---NTGA[Fic]M LEHTOLAINENLehtolainen, Leena.My first murder /Leena Lehtolainen ; translated by Owen F. Witesman.Las Vegas, NV :AmazonCrossing,2012.242 p. ;21 cm.Maria Kallio thriller"A Maria Kallio thriller" -- Cover.Maria Kallio has just been assigned her first murder investigation. To prove to herself and her squad that she has what it takes to be a detective, she'll have to solve the death of Tommi Peltonen. Found floating facedown at the water's edge of his Helsinki villa, Tommi had invited his choir group to spend a weekend at his retreat. But beneath the choir's seemingly tight-knit bonds seethed bitter passion and jealousy. As Maria sets out to determine the difference between friends and foes, she uncovers the victim's unsavory past - and motives for all seven suspects. Now it's up to her to untangle a complex set of clues before the killer strikes again.MurderInvestigationFiction.FinlandFiction.Suspense fiction.Mystery fiction.gsafdWitesman, Owen F.Lehtolainen, Leena.Maria Kallio thriller.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01363cam a2200373Ia 4500ocn780479943OCoLC20121228210451.0120315s2012 azu e 000 1 eng d 2012910475819655125819655526978146420024314642002469781464200267 (trade pbk.)1464200262(OCoLC)780479943(OCoLC)819655125(OCoLC)819655526BTCTAengBTCTABDXJCWM$KUUCOCLCONVCLEBMR0NTGUtOrBLWNTGAPS3612.O2485J35 2012813/.623M LOCKELocke, Hillary Bell.Jail coach :a Jay Davidovich mystery /Hillary Bell Locke.1st ed.Scottsdale, AZ :Poisoned Pen Press,c2012.249 p. ;23 cm.When a high-profile actor is sentenced to a brief prison term for a second DUI offense, former soldier and current corporate insurance loss minimizer Jay Davidovich hires a jail coach to ensure that the actor will be able to return to work upon his release.VeteransFiction.Hollywood (Los Angeles, Calif.)Fiction.Mystery fiction.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01799cam a2200397Ia 4500ocn779863792OCoLC20121228163023.0120301s2012 enk e 000 1 eng d016044313Uk8109057199780727881991 (hbk. : alk. paper)072788199X (hbk. : alk. paper)(OCoLC)779863792(OCoLC)810905719UKMGBUKMGBBDXBTCTAOCLCOBULIK2LEBGO3VP@CGPNTGUtOrBLWe-uk-enNTGAPR6063.A833S66 2012823/.9223M MASTERSMasters, Priscilla.Smoke alarm :a Martha Gunn mystery /Priscilla Masters.1st world ed.Sutton, Surrey, England :Severn House,2012.214 p. 
;23 cm.Martha Gunn mysteryA deadly fire reunites coroner Martha Gunn with Detective Inspector Alex Randall. When firemen are called to an intense blaze at the Grange in Melverley, they find the bodies of Christie Barton, her daughter, Adelaide, and father-in-law, William, along with evidence that suggests the fire was started deliberately. Detective Inspector Alex Randall enlists the help of coroner Martha Gunn, but the puzzle deepens with a second house fire - the occupant, retired nurse Monica Deverill, is missing. Where is she, and what links the two fires? The answers lie in a secret buried in the past.CoronersFiction.Missing personsFiction.Shrewsbury (England)Fiction.Mystery fiction.gsafdMasters, Priscilla.Martha Gunn mystery.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00765nam a2200289Ia 4500ocn820335354OCoLC20130614123747.0121205s2012 xx e 000 0 eng d97814776804451477680446(OCoLC)820335354HBPHBPNTGUtOrBLWNTGA813.623M REDHEADRedhead, Doris.The death of Amber Torley /Doris Redhead.[S.l.] :Doris Redhead,c2012.346 p. ;23 cm.TeenagersFiction.Mystery fiction.gsafdFathers and daughtersFiction.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02569cam a2200421Ii 4500ocn774491473OCoLC20121228160635.0120126s2012 ohua e b 001 0 eng d016152836Uk9781599635965 (paper)1599635968 (paper)(OCoLC)774491473BTCTAengBTCTABDXUKMGBrdaDADQBXYDXCPOHRRHOJ4NTGUtOrBLWNTGAMT67.S657 2012782.42123R070.5794 TWO 20132013 songwriter's market /Roseann Biederman, editor.Songwriter's market36th annual edition.Cincinnati, Ohio :Writers Digest Books,[2012]©2012363 pages :illustrations ;23 cmtexttxtrdacontentunmediatednrdamediavolumencrdacarrierIncludes bibliographical references and indexes."The most trusted guide to getting published"--Cover.Getting started. How to use Songwriter's market ;Where should I send my songs? ;Demo recordings ;How do I submit my demo? ;How do I avoid the rip-offs? ;Submission strategies --Music biz basics. Royalties ;Copyright ;Career songwriting ;Contracts --Articles & interviews. Mitch Goldfarb /Janice Gable Bashman ; Motion creates e-motion /Pat Pattison ; Amy Stroup /Marielle Murphy ; Writing songs for the commercial market /C.J. Watson ; Social media /David McPherson ; Hillary Scott /Annie Downs ; Learning to say no /Pat Pattison ; Songwriting grants /David McPherson ; Baby boomers and songwriting /Doris Bloodsworth ; Andy Hunt /Adria Haley ; What to do when a song isn't working /C.J. Watson ; Getting through the doors /John Braheny --Managing your work. Songwriting calendar --Markets. Music publishers ;Record companies ;Record producers ;Managers & booking agents ;Music firms ;Play producers & publishers ;Classical performing arts ;Contests & awards --Resources. 
Organizations ;Workshops & conference ;Retreats & colonies ;Venues ;State & provincial grants ;Publications of interest ;Websites of interest ;Glossary.Popular musicWriting and publishing.Popular musicMarketingDirectories.Music tradeDirectories.Biederman, Roseann S.,editor.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01998cam a2200445Ia 4500ocn788284684OCoLC20121228002712.0120419s2012 onc e 000 1 eng d2012009598X9780373696475 (pbk.)0373696477 (pbk.)(OCoLC)788284684BTCTAengBTCTABDXIHIJBLY32OCLCQMQPNLCNTGUtOrBLWn-us-laNTGAPS3604.E44374R43 2012813.623ROM DELEONDeLeon, Jana.The reckoning /Jana DeLeon.Don Mills, Ont. :Harlequin,[2012], ©2012.217 p. ;17 cm.texttxtrdacontentunmediatednrdamediavolumencrdacarrierMystere ParishHarlequin intrigue ;1380"In a tiny bayou town with far too many wicked secrets, the sudden disappearance of a six-year-old girl is a mystery Sheriff Holt Chamberlain is determined to solve. But teaming up with Alexandria Bastin is a complication he didn't expect. Nor is he prepared to collide with the dark side of Cajun culture--and his own troubled past. The frantic search leads them to a place said to hold magic, an eerie island where Alex is also working her charm on him. At one time, he'd been forced to leave her brokenhearted. And now, dangerously close to the truth, he'll do anything to protect her from the evil that surrounds them--an evil that might hold the key to sending a little girl home"--Publisher.SheriffsFiction.Women psychiatristsFiction.LouisianaFiction.Missing childrenFiction.Romantic suspense fiction.gsafdDeLeon, Jana.Mystere Parish.Harlequin intrigue ;1380.vf2012-12-27aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02174cam a2200433Ia 4500ocn796757680OCoLC20121228194521.0120622s2012 onc e 000 1 eng d9780373885596 (pbk.)0373885598 (pbk.)(OCoLC)796757680BTCTAengBTCTABDXIHIIUKCGPNTGUtOrBLWNTGA813/.0850823ROM HOLIDAYHoliday with a vampire 4 /Susan Krinard, Theresa Meyers and Linda Thomas-Sundstrom.Don Mills, Ont., Canada :Harlequin,c2012.363 p. ;17 cm.Harlequin nocturne ;149Includes Heather Graham's The gatekeeper, the prequel to a new quartet, The keepers: L.A.Halfway to dawn: "Amid a supernatural war between the races, can Fiona, captain of the human special forces, and Kain, a compelling fugitive from the vampire army, risk working together to attain peace on earth?"--Publisher.The gift: "All Cullen McCormack wants for Christmas is the missing locket that holds the key to his immortality. 
But will Angelica, the woman who now flaunts it about her exquisite neck, also claim his vampire heart?"--Publisher.Bright star: "Immortal Dylan McCay has vowed to protect the secret origins of his species at all costs--until he meets Savannah, a passionate astronomer on the brink of discovering the elusive Christmas Star"--Publisher.Halfway to dawn /Susan Krinard --Gift /Theresa Meyers --Bright star /Linda Thomas-Sundstrom --Gatekeeper /Heather Graham.VampiresFiction.ImmortalityFiction.Man-woman relationshipsFiction.Paranormal romance stories.Christmas stories.Krinard, Susan.Halfway to dawn.Meyers, Theresa.Gift.Thomas-Sundstrom, Linda.Bright star.Graham, Heather.Gatekeeper.Harlequin nocturne ;149.JL2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02174cam a2200409Ia 4500ocn794711954OCoLC20121228192719.0120603s2012 nyub e 000 f eng d97814555143591455514357(OCoLC)794711954BTCTAengBTCTAOMPMR0WIMGO3UPZJO3NYPOCLCONTGUtOrBLWe-uk-stNTGAPS3569.C695L35 2012813/.5423;ROM SCOTTScott, Amanda,1944-The Laird's choice /Amanda Scott.1st ed.New York :Forever,2012.xii, 386 p. :map ;18 cm.Lairds of the Loch"Historical romance"--Spine.Includes an excerpt from the author's book The Knight Temptress: p. [361]-386."Lady Andrena MacFarlan has been different since the day she was born. Possessing the power to sense others' most intimate desires, she knows her duty is to marry the man who will take the MacFarlan name as his own and help her father regain the chiefdom of their clan. But her unique gifts don't prepare her for the day when a mighty warrior suddenly enters her life. The attraction between them is undeniable -- and insatiable. Hunted by brutal enemies, the wounded Magnus Galbraith washes up on MacFarlan land where he is rescued by a laird's lovely daughter. Andrena is like no one Magnus has ever known. She has the uncanny ability to both calm and enflame him in ways he never dreamed possible. But she has other unknown-and dangerous-powers. Now, as Magnus seeks to avenge a brother and protect a king, the young beauty could prove his greatest ally-or his ultimate undoing" -- p. [4] of cover.Man-woman relationshipsFiction.ScotlandHistory15th centuryFiction.Highlands (Scotland)Fiction.Love stories.gsafdHistorical fiction.gsafdScott, Amanda,1944-Lairds of the loch.AMW2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01444cam a22003618a 4500ocn795168355OCoLC20121228195332.0120912s2013 nyu e 000 1 eng 20120346709780425261224 (hardcover : alk. paper)0425261220 (hardcover : alk. paper)(OCoLC)795168355DLCengDLCBTCTAOCLCOUPZIUKJAONTGUtOrBLWNTGAPR6052.A849D63 2013823/.91423S BAXTERBaxter, Stephen.Doctor Who :the wheel of ice /Stephen Baxter.Wheel of iceAce hardcover ed.New York :Ace Books,2013.311 p. ;24 cm.Aboard the Wheel, a ring of ice and steel turning around a moon of Saturn and home to a mining colony supplying a resource-hungry Earth, the [2nd] Doctor, Jamie and Zoe become enmeshed in a critical situation. Suspected of sabatoge, they soon find themselves caught in a mystery that goes right back to the creation of the solar system. 
A mystery that could kill them all.Doctor Who (Fictitious character)Fiction.Science fiction.Science fiction.gsafdDoctor Who (Television program : 1963-1989)bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01332cam a2200409Ii 4500ocn819325422OCoLC20121228202524.0121126s2013 nyu e 000 1 eng d97803455115080345511506(OCoLC)819325422UPZengrdaUPZIUKIEPNTGUtOrBLWNTGAPS3576.A33S36 2012813/.5423S ZAHNZahn, Timothy,author.Star Wars.Scoundrels /Timothy Zahn.ScoundrelsNew York :Del Rey/Ballantine Books,2013.443 pages ;25 cmtextrdacontentunmediatedrdamediavolumerdacarrier"Lucas books."Han Solo, Chewbacca, and Lando Calrissian work together on a potentially lucrative heist in the hopes of paying of Jabba the Hutt's bounty on Han's head.Solo, Han (Fictitious character)Fiction.Calrissian, Lando (Fictitious character)Fiction.Life on other planetsFiction.Star Wars fiction.Star Wars fiction.Science fiction.gsafdbp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$03358ccm a2200397Ia 4500ocm48527791 OCoLC20121228204514.0011204s1999 cauzzz e n zxx d6430950599780769292649076929264X654979008026AF9948Alfred Publ Co.(OCoLC)48527791(OCoLC)643095059SDEengSDEBAKEROCPSINLBDEBBGOCLGK8BDXYDXCPNTGUtOrBLWNTGAM32.8.C6C6 1999786.2Complete21LR 56980rvkSCO 786.2164 COMComplete advanced piano solos :music for all occasions /[Dan Coates, arranger].Van Nuys, CA :Alfred Pub. Co.,c1999.304 p. of music ;31 cm."The professional touch"--Cover.Angel eyes --Anne of Green Gables. Anne's theme --Anywhere the heart goes (Meggie's theme) --Arthur's theme (Best that you can do) --As time goes by --Ashokan farewell --Beauty and the beast --Because you loved me --Can you read my mind? (Love theme from "Superman") --Canon in D /Pachelbel --The colors of my life --Colors of the wind --Desperado --A dream is a wish your heart makes --Evergreen --Favorite son --Forever --Friends & lovers (Both to each other) --From a distance --The greatest love of all --Happy birthday to you --Heart --Hey there --The homecoming --How could I ever know? --How do I live --How do you keep the music playing? --I believe I can fly --I can love you like that --(Everything I do) I do it for you --I don't want to miss a thing --I say a little prayer --I swear --I will always love you --If my friends could see me now? --If you believe --in this life --Karen's theme --Kei's song.La vie en rose --Love and marriage --Love solo --May you always --Miss Celie's blues --Misty --My one true friend --My unknown someone --Noelle's theme (The other side of midnight) --Oh! What it seemed to be --Once before I go --One moment in time --Open arms --Over the rainbow --The prayer --Ragtime --The rose --Saving all my love for you --Send in the clowns --Separate lives (Love theme from "White nights") --She loves me --Song from MASH (Suicide is painless) --Star Wars main theme --Summer me, winter me (Theme from "Picasso summer") --Tears in heaven --Tell him --That's what friends are for --Theme from Ice Castles (Through the eyes of love) --Theme from New York, New York --Time to say goodbye --Tonight I celebrate my love --Un-break my heart --Up where we belong --Valentine --We've got tonight --What's new? --The wind beneath my wings --You can always count on me.Piano music, Arranged.Popular instrumental music.Coates, Dan.Online version:Complete advanced piano solos.Miami, FL : Warner Bros. 
Publications, c1999(OCoLC)647113373Table of contentshttp://bvbr.bib-bvb.de:8991/F?func=service&doc_library=BVB01&doc_number=017759340&line_number=0001&func_code=DB_RECORDS&service_type=MEDIAcsr2012-12-28cC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$03288ccm a2200505Ia 4500ocn648933639OCoLC20130614123747.0100715r20101971xxumuz e abei n zxx d9780739073100073907310903808140215436268Alfred Music Pub. Co.(OCoLC)648933639BTCTAengBTCTAFQGOHXVP@NYPNTGUtOrBLWw9x1rgmrwzka01NTGAM22.JSCO 786.21645 JOPJoplin, Scott,1868-1917.Piano musicComplete piano works /Scott Joplin ; edited by Vera Brodsky Lawrence ; editorial consultant, Richard Jackson ; introduction by Rudi Blesh.Complete piano works :rags, waltzes, marchesRags, waltzes, marches[New York] :New York Public Library ;Van Nuys, CA :[Distributed by] Alfred Music Pub. Co.,c [2010?], c1971.xl, [6], 327 p. of music :ill., facsims., ports. ;31 cm."Produced by The New York Public Library in conjunction with Belwin-Mills Publishing Corp."--T.p.Originally published in 1971 under the title: The collected works of Scott Joplin.Includes editor's note and introduction in English.Rollography of Joplin works: p. 317-318; discography of 78 rpm records: p. 319-321; selective discography of 33 1/3 rpm records: p. 322-324.Includes bibliographical references and index.Original works.Great crush collision --Combination march --Harmony Club waltz --Original rags /arranged by Charles N. Daniels --Maple leaf rag --Peacherine rag --Augustan Club waltz --The easy winners --Cleopha --A breeze from Alabama --Elite syncopations --The entertainer --March majestic --The strenuous life --Weeping willow --Palm leaf rag --The favorite --The sycamore --The Cascades --The chrysanthemum --Bethena --Rosebud --Leola --Binks' waltz --Eugenia --Antoinette --Ragtime dance --Gladiolus rag --Nonpareil (None to equal) --Sugar cane --Pine apple rag --Wall Street rag --Solace --Pleasant moments --Country club --Euphonic sounds --Paragon rag --Stoptime rag --Scott Joplin's new rag --Magnetic rag --Reflection rag.Collaborative works.Swipesy /with Arthur Marshall --Sunflower slow drag /with Scott Hayden --Something doing(with Scott Hayden) --Lily Queen(with Arthur Marshall) --Heliotrope bouquet /with Louis Chauvin --Felicity rag /with Scott Hayden --Kismet rag /with Scott Hayden.Miscellaneous works.School of ragtime --Sensation /by Joseph F. Lamb ; arranged by Scott Joplin --Silver swan rag /attributed to Scott Joplin.Addenda (1981).Fig leaf rag --Rose leaf rag --Searchlight rag.Piano music (Ragtime)Marches (Piano)Waltzes.Piano music.Lawrence, Vera Brodsky.Jackson, Richard,1936-Blesh, Rudi,1899-1985.csr2012-12-28cC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01485cam a2200421 a 4500ocn778421992OCoLC20130614123747.0120323s2012 nyu d 000 1 eng 20120063279780805089516 (hbk.)0805089519 (hbk.)9780805096361 (ebk.)0805096361 (ebk.)(OCoLC)778421992DLCengDLCIG#BTCTABDXOCPOCLCONTGUtOrBLWNTGAPZ7.B52859Mi 2012[Fic]23Y BJORKMANBjorkman, Lauren.Miss Fortune Cookie /Lauren Bjorkman.1st ed.New York :Henry Holt,2012.279 p. 
;23 cm.Erin, a non-Chinese teenager living in San Francisco's Chinatown, ghostwrites an online advice column, but when a reply to her ex-best friend backfires, Erin's carefully constructed life takes a crazy spin.Advice columnsJuvenile fiction.Interpersonal relationsJuvenile fiction.FriendshipJuvenile fiction.Chinese AmericansJuvenile fiction.Chinatown (San Francisco, Calif.)Juvenile fiction.San Francisco (Calif.)Juvenile fiction.FriendshipFiction.Chinese AmericansFiction.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01238cam a2200349Ia 4500ocn813858201OCoLC20121228090652.0121009s2013 nyub d 000 f eng d016191174Uk9780758281463 (pbk.)0758281463 (pbk.)(OCoLC)813858201UKMGBengUKMGBJQWZS3NTGUtOrBLWNTGAPZ7.E4853Cr 2013813.623Y ESTEPEstep, Jennifer.Crimson frost :a Mythos Academy novel /Jennifer Estep.New York :KTeen,c2013.389 p. :map ;21 cm.A Mythos Academy novelArrested in the middle of her first date with Logan and wrongly accused of helping the Reapers free the evil god Loki, Gwen is shocked to learn that her accuser is Logan's father and that everyone at Mythos Academy believes she is guilty.Frost, Gwen (Fictitious character)Fiction.Paranormal fiction.Young adult fiction.Estep, Jennifer.Mythos Academy novel.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01391cam a2200409Ia 4500ocn810191754OCoLC20121228171237.0120918s2012 nyu e 000 1 eng d016167443Uk77841645797800619627070061962708(OCoLC)810191754(OCoLC)778416457UPZengUPZBTCTABDXUKMGBXY4YDXCPIK2VP@ZP7NTGUtOrBLWn-us-nyNTGAPZ7.G53887Luc 2012[Fic]23Y GODBERSENGodbersen, Anna.The lucky ones :a bright young things novel /Anna Godbersen.1st ed.New York :Harper,c2012.375 p. ;22 cm.Bright young things novelNew York City's latest It Girl, Cordelia Grey, is flying high with celebrity pilot Max Darby. But such a public relationship could expose some very personal secrets.Social classesJuvenile fiction.SecretsFiction.WealthJuvenile fiction.FriendshipJuvenile fiction.New York (N.Y.)History1898-1951Juvenile fiction.Godbersen, Anna.Bright young things.AMW2012-12-20aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01909cam a2200589 a 4500ocn709682525OCoLC20130614123747.0110804s2011 nyu d 000 1 eng 2011023301757509465978038574098203857409809780375989797037598979X(OCoLC)709682525(OCoLC)757509465DLCDLCBTCTABULTOHHBPDPLNTGUtOrBLWlcacpccNTGAPZ7.H70326Unl 2011[Fic]23Y HOLDERHolder, Nancy.Unleashed /Nancy Holder & Debbie Viguié.1st ed.New York :Delacorte Press,c2011.385 p. 
;22 cm.Wolf springs chroniclesOrphaned Kat McBride, nearly seventeen, must leave California to live with her grandfather in small-town Arkansas, where she is drawn into a paranormal world of feuding werewolf clans.SupernaturalFiction.High schoolsJuvenile fiction.SchoolsFiction.WerewolvesJuvenile fiction.Moving, HouseholdJuvenile fiction.OrphansJuvenile fiction.GrandfathersJuvenile fiction.ArkansasJuvenile fiction.Paranormal fiction.High schoolsFiction.SchoolsFiction.WerewolvesFiction.Moving, HouseholdFiction.OrphansFiction.GrandfathersFiction.ArkansasFiction.Viguié, Debbie.Holder, Nancy.Wolf Springs chronicles.edl 20111205aConnexion ImportsKCLSC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01671cam a2200493 a 4500ocn780415630OCoLC20130614123747.0120314s2012 nyu d 000 1 eng 20120047957776024029781416998075 (pbk)1416998071 (pbk)9781416998068 (hc)1416998063 (hc)9781416998105 (ebook)1416998101 (ebook)(OCoLC)780415630(OCoLC)777602402DLCengDLCIG#BTCTABDXYDXCPUPZOCLCOJBLNTGUtOrBLWpcclcacNTGAPZ7.H70326Van 2012[Fic]23Y HOLDERHolder, Nancy.Vanquished /Nancy Holder & Debbie Viguie.1st Simon Pulse ed.New York :Simon Pulse,2012.462 p. ;21 cm.Crusade trilogy ; bk. 3On the brink of the final battle against the Cursed Ones, the Salamancan hunters' internal bickering threatens their cause, and Jenn must try to rally her team while facing her own doubts, especially about her love for Antonio.VampiresJuvenile fiction.Guerrilla warfareJuvenile fiction.SupernaturalFiction.SistersJuvenile fiction.Horror stories.Paranormal fiction.Young adult fiction.Viguié, Debbie.Holder, Nancy.Crusade trilogy ;bk. 3.jab2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01517cam a2200421 a 4500ocn779266083OCoLC20130614123747.0120331s2012 nyu d 000 1 eng 20120082589781442434356 (hardcover)144243435X (hardcover)9781442434387 (ebook)1442434384 (ebook)9781442434370 (trade paper)1442434376 (trade paper)(OCoLC)779266083DLCengDLCIG#BTCTABDXYDXCPOCPOCLCONTGUtOrBLWlcacpccn-us-meNTGAPZ7.M4787928686Lo 2012[Fic]23Y MCNAMARAMcNamara, Amy.Lovely, dark and deep /Amy McNamara.1st ed.New York :Simon & Schuster Books for Young Readers,c2012.342 p. ;22 cm.Ages 14 up.In the aftermath of a car accident that kills her boyfriend and throws her carefully planned future into complete upheaval, high school senior Wren retreats to the deep woods of Maine to live with the artist father she barely knows and meets a boy who threatens to pull her from her safe, hard-won exile.Depression, MentalJuvenile fiction.GriefJuvenile fiction.MaineJuvenile fiction.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02255cam a2200445Ia 4500ocn770297437OCoLC20121228182313.0120305s2012 nyu d 000 1 eng d 2011275889015951747Uk7459786969780758269249 (pbk.)0758269242 (pbk.)(OCoLC)770297437(OCoLC)745978696VPWengVPWDLCBTCTAUKMGBNCLHCOBKXBDXYDXCPJBLIUINTGUtOrBLWlccopycatNTGAPZ7.R252314Li 2012813.623Y REEDReed, Jaime.Living violet /Jaime Reed.New York :Dafina KTeen Books,c2012.viii, 311 p. ;21 cm.The Cambion chronicles ;bk. 1"Kensington Publishing Corp."Includes a reading group guide.Includes an excerpt from Burning emerald."He's persuasive, charming, and way too mysterious. And for Samara Marshall, her co-worker is everything she wants most--and everything she most fears ... 
Samara Marshall is determined to make the summer before her senior year the best ever. Her plan: enjoy downtime with friends and work to save up cash for her dream car. Summer romance is not on her to-do list, but uncovering the truth about her flirtatious co-worker, Caleb Baker, is. From the peculiar glow to his eyes to the unfortunate events that befall the girls who pine after him, Samara is the only one to sense danger behind his smile. But Caleb's secrets are drawing Samara into a world where the laws of attraction are a means of survival. And as a sinister power closes in on those she loves, Samara must take a risk that will change her life forever ... or consume it"--Publisher's description.TeenagersJuvenile fiction.High school studentsJuvenile fiction.Paranormal fiction.African American teenagersJuvenile fiction.Paranormal romance stories.Reed, Jaime.Cambion chronicles ;bk. 1.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01589cam a2200457 a 4500ocn773493500OCoLC20130614123747.0120119r20122011nyu d 000 1 eng 20110465829780545429603 (hbk.)0545429609 (hbk.)97805454425100545442516(OCoLC)773493500DLCengDLCIG#BDXOCPDPLOCLCONTGUtOrBLWe-uk-wlNTGAPZ7.S43748Roc 2012[Fic]23Y SKUSESkuse, C. J.Rockoholic /C.J. Skuse.1st American ed.New York :Scholastic,2012.358 p. ;22 cm."First published in the United Kingdom in 2011 by Chicken House"--T.p. verso.Sixteen-year-old Jody Flook is known for doing stupid things, but when she accidentally kidnaps her idol, rock star Jackson Gaitlin, at his only concert in the entire United Kingdom, and he does not want to leave her garage, she is in real trouble.KidnappingJuvenile fiction.FameJuvenile fiction.MusiciansJuvenile fiction.Rock musicJuvenile fiction.Best friendsJuvenile fiction.FriendshipJuvenile fiction.WalesJuvenile fiction.Best friendsFiction.FriendshipFiction.bp2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02256cam a22003738a 4500ocn401141831OCoLC20130614123748.0111128s2012 nyu d b 001 0beng 20110450499781604135220 (acid-free paper)1604135220 (acid-free paper)(OCoLC)401141831DLCengDLCBTCTABDXYDXCPOCLCOBURNTGUtOrBLWe-uk-enNTGAPR2894.F58 2012822.3/3B23YB SHAKESPEAREFoster, Brett,1973-Shakespeare's life /Brett Foster.New York :Chelsea House,c2012.256 p. :ill. (chiefly col.) ;25 cm.Backgrounds to ShakespeareA thorough biography of the Bard, featuring the latest findings from scholars. Coverage includes: Shakespeare's early years in Stratford, including his marriage to Anne Hathaway; his rise to stardom within the London theater scene; the death of his nine-year-old son, Hamnet; the writing of his greatest works, including Romeo and Juliet, Hamlet, Macbeth, and others; his retirement from the theater and move back to Stratford; and much more.--From publisher description.Shakespeare's birth and adolescence --The Royal Shakespeare Company in Stratford --Shakespeare the young man --Anne Hathaway --Shakespeare's "lost years" --Shakespeare's 1580s writings? 
--Shakespeare's beliefs --Shakespeare the young actor and writer --The commercial theaters of London --Shakespeare's stage --Shakespeare's early successes --The plays: aids for reading --Shakespeare's growing fame --Shakespeare's books --Shakespeare back in Stratford --Shakespeare's triumphs at the Globe --How to move a theater --The Globe Theatre today --Shakespeare the literary lion --An overlooked Shakespeare poem.Includes bibliographical references and index.Shakespeare, William,1564-1616.Dramatists, EnglishEarly modern, 1500-1700Biography.Backgrounds to Shakespeare.vf2012-12-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01572cam a22004458i 4500ocn810111518OCoLC20121228200117.0121029s2012 meua ed 000 1 eng 20120405479781410454034 (lg. print)1410454037 (lg. print)(OCoLC)810111518DLCengrdaDLCBTCTAYDXCPOCLCOGO3IEPZGVIK2NTGUtOrBLWpccNTGAPS3553.U75P68 2012b813/.5423LP FIC CUSSLERCussler, Clive.Poseidon's arrow /Clive Cussler and Dirk Cussler.Large print edition.Waterville, Maine :Wheeler Publishing,2012.671 pages (large print) :illustrations ;23 cm.textrdacontentunmediatedrdamediavolumerdacarrierA Dirk Pitt novelWhen a key element of a new and powerful attack submarine goes missing and ships begin disappearing in mid-ocean, NUMA director Dirk Pitt and his team embark on an international chase to discover the truth.Pitt, Dirk (Fictitious character)Fiction.Marine biologistsFiction.Large type books.Suspense fiction.gsafdAdventure fiction.gsafdCussler, Dirk.Cussler, Clive.Dirk Pitt adventure.bp2012-12-28qC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01534cam a22004338a 4500ocn805701816OCoLC20130614123748.0120808r20122012meuaj ed 000 1 eng 20120320279781410451453 (lg. print)1410451453 (lg. print)(OCoLC)805701816DLCengDLCBTCTAYDXCPOCLCOIUKIEPIHINTGUtOrBLWpccn-us-ncNTGAPS3563.A679B89 2012813/.5423LP M MARONMaron, Margaret.The buzzard table /Margaret Maron.Large print ed.Waterville, Me. :Thorndike Press,2012.399 p. (large print) :ill., geneal. table ;23 cm.Thorndike Press large print mysteryA Deborah Knott mysteryA mysterious ornithologist staying at Mrs. Lattimore's Victorian home and doing research on Southern vultures seems familiar to Judge Deborah Knott and Sheriff's Deputy Dwight Bryant, especially after a murderer strikes.Knott, Deborah (Fictitious character)Fiction.Women judgesFiction.North CarolinaFiction.Large type books.Mystery fiction.gsafdLarge type books.Thorndike Press large print mystery series.bp2012-12-28qC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01727cam a2200385 a 4500ocn803474001OCoLC20130614123748.0120731s2012 meu ed 000 1 eng 20120308179781410452269 (lg. print : hbk.)1410452263 (lg. print : hbk.)(OCoLC)803474001DLCengDLCIG#OCLCOYDXCPBTCTAMPCOCLCONTGUtOrBLWe-uk-enNTGAPR6037.A95S8 2012823/.91223LP M SAYERSSayers, Dorothy L.(Dorothy Leigh),1893-1957.Strong poison :a Lord Peter Wimsey mystery with Harriet Vane /Dorothy L. Sayers.Large print ed.Waterville, Maine :Thorndike Press,2012.407 p. (large print) ;23 cm.Thorndike Press large print famous authorsWhen her fiancé dies exactly as described in one of her novels, mystery writer Harriet Vane becomes the prime suspect. Can Lord Peter Wimsey find the real poisoner in time to save her from the gallows? Impossible, it seems. The Crown's case is watertight. The police are adamant. 
The judge's summing-up is clear: Harriet Vane is guilty. But Lord Peter is determined to find her innocent - as determined as he is to make her his wife.Wimsey, Peter, Lord (Fictitious character)Fiction.Private investigatorsEnglandFiction.Large type books.Mystery fiction.gsafdThorndike Press large print famous authors series.edl2012-12-28qC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01334cam a2200397 a 4500ocm62343381 OCoLC20060620094516.0050324s2005 enk e 000 0 eng GBA531783bnb013175440Uk1844541290 (hbk.)UKMUKMOUNBAKERNTGUtOrBLWengfreukscpNTGA961.204209222DT236.Q2413 2005x961.204209 QADQaddafi, Muammar.Dans le concert des nations.EnglishMy vision /Muammar Gaddafi ; conversations and frank exchanges of views with Edmond Jouve ; translated into English by Angela Parfitt.London :John Blake,2005.ix, 261 p. ;24 cm.Translation of: Dans le concert des nations.Qaddafi, Muammar.Qaddafi, MuammarPhilosophy.Heads of stateLibyaBiography.LibyaForeign relations1969-LibyaPolitics and government1969-Jouve, Edmond.Parfitt, Angela..b2014243203-02-1004-17-06(2)be(2)bo(2)kt06-20-06ma-engenk03C0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01964cjm a2200469Ia 4500ocn710059261OCoLC20130614123748.0sd fungnnmmneu110323s2011 vaumunne n eng d00610583396720gtin-1461058339672026393-DSaguaro Road Records3964N2(OCoLC)710059261BTCTAengBTCTABKXTEFCPLB@LUtOrBLWcygmNTGAM2198.B55T35 2011782.5/25422CD CR BLI T93 Blind Boys of Alabama.prfTake the high road[sound recording] /Blind Boys of Alabama.[Fairfax, Va.] :Saguaro Road Records,p2011.1 sound disc :digital ;4 3/4 in.Blind Boys of Alabama; with other performers.Produced by Jamey Johnson, Chris Goldsmith, Kevin Grantt, and Chat Cromwell.Recorded principally at Ben's Studio, Nashville, Tenn.Compact disc.Take the high road(with the Oak Ridge Boys) --Jesus, hold my hand --Have thine own way, Lord(with Jamey Johnson) --I was a burden(with Lee Ann Womack) --Can you give me a drink?(with Vince Gill) --Family Bible(with Willie Nelson) --Jesus built a bridge to heaven --I know a place --Why don't you live so God can use you --Lead me home --Stand by me --I saw the light (with Hank Williams, Jr.) --The last mile of the way.Country gospel music2011-2020.localJohnson, Jamey.prfWomack, Lee Ann.prfGill, Vince.prfNelson, Willie,1933-prfWilliams, Hank,Jr.,1949-prfOak Ridge Boys.prfdjn 201204199786314208103 (BT)jC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01440cjm a2200421Ia 4500ocn712135416OCoLC20130614123748.0sd fungnnmmneu110404s2011 tnucynne d n eng d00093624966678gtin-14093624966678523992-2Warner Bros.(OCoLC)712135416BTCTAengBTCTABKXTEFGK8UtOrBLWNTGAM1630.18.R53R53 2011782.421642781.64222CD PC RIC R92Rich, John,1974-Rich rocks[sound recording] /John Rich.Nashville :Warner Bros.,p2011.1 sound disc :digital ;4 3/4 in.Performed by John Rich.Compact disc.Lyrics in container insert.Country done come to town --You had me from hello(feat. Lil Jon) --Mack truck(feat. Kid Rock) --You rock me --Texas(feat. Cowboy Troy) --Let somebody else drive(feat. 
Hank Williams, Jr.).Country music2011-2020.Lil Jon,1971-prfKid Rock(Rapper)prfCowboy Troy,1970-prfWilliams, Hank,Jr.,1949-prfcme2011-10-149786314224332 (BT)jConnexion ImportsKCLSC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01482cjm a2200373Ia 4500ocn515677946OCoLC20130614123748.0sd fsngnnmmneu100213s2010 maubgnne n eng d00011661064126gtin-1401166106412611661-0641-2Rounder Records(OCoLC)515677946BTCTAengBTCTATEFNTGUtOrBLWNTGAM1630.18.G73F36 2010782.42164222CD PC GRA F41Grascals (Musical group)prfThe famous Lefty Flynn's[sound recording] /the Grascals.Burlington, MA :Rounder Records,p2010.1 sound disc :digital ;4 3/4 in.Performed by Grascals.Compact disc.Last train to Clarksville --Son of a sawmill man --Satan and Grandma --Everytime --Out comes the sun --Blue rock slide --The famous Lefty Flynn's --My baby's waiting on the other side --My old friend the blues --Up this hill and down --I'm blue I'm lonesome(feat. Hank Williams Jr.) --Give me Jesus.Bluegrass music2001-2010.Williams, Hank,Jr.,1949-prf.b2379581509-19-1003-18-10befwkgmvsa04-29-10mj-engmau459786313524099 (BT)cmeC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$03126cjm a2200685Ia 4500ocn472481247OCoLC20130614123748.0sd fungnn|||ed091203s2010 caumun e eng d509996078802307880Grammy Recordings/Capitol50999 6 07880 2 3Grammy Recordings/Capitol50999 6 07880 23Grammy Recordings/Capitol(OCoLC)472481247Midwest Tapehttp://www.midwesttapes.comTEFMTTEFMTTEFNTGUtOrBLWpprccyNTGAM1630.18.T96 2010782.4216422CD PR GRA G80Grammy nominees 2010[sound recording].2010 Grammy nominees[Santa Monica, CA] :Grammy Recordings ;New York :Capitol,p2010.1 sound disc :digital ;4 3/4 in.Various performers.Compact disc.I gotta feeling(The Black Eyed Peas) --Poker face(Lady Gaga) --Use somebody(Kings of Leon) --You & me(Dave Matthews Band) --You belong with me(Taylor Swift) --Fallin' for you(Colbie Caillat) --You found me(The Fray) --Sober(P!nk) --My life would suck without you(Kelly Clarkson) --Hot n cold(Katy Perry) --Halo(Beyoncé) --Hometown glory(Adele) --Chicken fried(Zac Brown Band) --It happens(Sugarland) --I run to you(Lady Antebellum) --Here come goodbye(Rascal Flatts) --21 guns(Green Day) --Life in technicolor ii(Coldplay) --I'll go crazy if I don't go crazy tonight(U2) --Can't find my way home(Eric Clapton and Steve Winwood).Popular music2001-2010.Rock music2001-2010.Country music2001-2010.Grammy Awards.Lady Gaga.Swift, Taylor,1989-Caillat, Colbie.P!nk,1979-Clarkson, Kelly,1982-Perry, Katy.Beyoncé,1981-Adele,1988-Clapton, Eric.Winwood, Steve,1948-Black Eyed Peas (Musical group)Kings of Leon (Musical group)Dave Matthews Band.Fray (Musical group)Zac Brown Band.Sugarland (Musical group)Lady Antebellum (Musical group)Rascal Flatts (Musical group)Green Day (Musical group)Coldplay (Musical group)U2 (Musical group).b2359032409-20-1001-06-10ap(3)au(4)bebd(2)bo(2)bp(3)buca(4)cocr(2)dm(2)du(2)frfc(2)f3(4)fwfogr(3)iskm(4)ktkg(2)kl(2)lflh(2)mv(3)mi(2)mkncamu(2)nw(2)nb(4)ou(4)rern(2)rbsa(3)shsk(2)sw(2)snso(2)vv(2)vswc(2)wl(2)wm01-20-10mj-engcau0939786313375578 (BT)cmeC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01397cjm a2200385Ia 4500ocn694633146OCoLC20110311153151.0sd fungnn|||eu101231s2011 xxkmunne n eng d88697446992688697 44699 2XL Recordings(OCoLC)694633146TEFMTTEFMTTEFBTCTAIMDGL9MR0UtOrBLWpprcNTGAM1630.18.A24T94 2011782.4216622CD PR ADE 
T99Adele,1988-21[sound recording].Twenty one[United Kingdom] :XL Recordings,p2011.1 sound disc (48 min.) :digital ;4 3/4 in.004808Adele, vocals ; with acc. musicians.Compact disc.Rolling in the deep(3:48) --Rumour has it(3:44) --Turning tables(4:10) --Don't you remember(4:03) --Set fire to the rain(4:02) --He won't go(4:38) --Take it all(3:49) --I'll be waiting(4:02) --One and only(5:49) --Lovesong(5:17) --Someone like you(4:46).Popular music2011-2020.Alternative rock music2011-2020.localSoul music2011-2020.localcme 201103119786314072902 (BT)j$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02800cgm a2200637Ia 4500ocm42581583OCoLC20130614123748.0vd bvaizq991006p19991982cau089 e vleng d0783232063978078323206502519205232320523Universal Home Video(OCoLC)42581583TOLTOLOCLCQOCLBTCTASTFZ5UYDXCPOCLCQOCLCGTEFOCLCQUMCNTGUtOrBLWengengfrespaengNTGADVD FIC DDead men don't wear plaid[videorecording] /a Universal picture ; an Aspen Film Society/William E. McEuen/David V. Picker production ; written by Carl Reiner, George Gipe, Steve Martin ; produced by David V. Picker and William E. McEuen ; directed by Carl Reiner.Universal City, CA :Universal Home Video,1999.1 videodisc (1 hr., 29 min.) :sd., b&w ;4 3/4 in.DVD, widescreen format; Dolby Digital mono.In English with optional French or Spanish subtitles; closed-captioned.Title and credits from container.Steve Martin, Rachel Ward, Reni Santoni, Carl Reiner.Director of photography, Michael Chapman; editor, Bud Molin; music, Miklos Rozsa.MPAA rating: PG.Originally released as a motion picture in 1982.Private eye Rigby Reardon investigates the death of a wealthy scientist with a little help from his "friends." A new technique, recycling films of the '40s with a new story line.Special features include production notes, cast & filmmakers' bios, film highlights, theatrical trailer (2 min.) and web links.Private investigatorsDrama.ScientistsDeathDrama.MurderInvestigationDrama.Feature filmsUnited States.lcgftDetective and mystery films.lcgftComedy films.lcgftVideo recordings for the hearing impaired.lcgftVideo recordings with French language options.|2local.Video recordings with Spanish language options.|2local.Reiner, Carl,1922-Gipe, George.Martin, Steve,1945-Picker, David V.McEuen, William E.Ward, Rachel,1957-Santoni, Reni.Rózsa, Miklós,1907-1995.Universal City Studios.Universal Studios Home Video (Firm)Aspen Film Society.kh 20110512hConnexion ImportsKCLSC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02222cgm a2200553Ia 4500ocm41443902 OCoLC20130614123748.0vd cvaizq990527p19991983cau090 e vleng d079073995X978079073995308539163752316375Warner Home Video(OCoLC)41443902KCPKCPOCLCQLEORCSBTCTAVP@OCLCQNTGUtOrBLWNTGADVD FIC MThe man with two brains[videorecording] /Warner Bros. presents ; a Carl Reiner film ; an Aspen Film Society, William E. McEuen/David V. Picker production ; written by Carl Reiner, Steve Martin, George Gipe ; produced by David V. Picker and William E. McEuen ; directed by Carl Reiner.Burbank, CA :Warner Home Video,c1999.1 videodisc (90 min.) :sd., col. ;4 3/4 in.DVD, full screen presentation; Dolby Digital.Closed-captioned.Steve Martin, Kathleen Turner, David Warner.MPAA rating: R.Originally released as a motion picture in 1983.A wacky brain surgeon mourns for his wife and then falls for a beauty who hides a heart of stone. 
The situation is hopeless until his oddball research offers a ray of hope.For specific features see interactive menu.SurgeonsDrama.WifeDeathDrama.WidowersDrama.Feature filmsUnited States.lcgftComedy films.lcgftVideo recordings for the hearing impaired.lcgftMartin, Steve,1945-Turner, Kathleen,1954-Warner, David,1941-Reiner, Carl,1922-drtGipe, George.Picker, David V.McEuen, William E.Warner Bros. Pictures (1969- )Warner Home Video (Firm).b2319397909-15-1008-28-09duf3fonwrhvv09-24-09mhiengcau46186726khC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02049cgm a2200505Ka 4500ocn809868852OCoLC20121017204545.0vd mvaizq120913s2012 cau390 e vleng d826663135367SF 13536Shout! FactorySF 13537Shout! FactorySF 13538Shout! FactorySF 13539Shout! Factory(OCoLC)809868852TEFMTTEFMTTEFNTGUtOrBLWNTGAPN1969.C65S74 2012792.7/623DVD 792.76 STESteve Martin[videorecording] :the television stuff /produced by Mary Sherwood.Television stuffLos Angeles, CA :Shout! Factory,c2012.3 videodiscs (390 min.) :sd., col., b&w ;4 3/4 in.DVD, Dolby Digital.Cataloged from container.Steve Martin, featuring special guest appearances by Dan Aykroyd, Laraine Newman, Lauren Hutton, John Belushi, Bill Murray, Lynn Redgrave, Paul Simon, David Letterman, Carl Reiner, Eric Idle, and Johnny Cash.The complete long-lost television specials of the one and only 'Wild and Crazy Guy' himself, with most material not seen for over 30 years. Includes several bonus clips, two music videos, SNL appearances and acceptance speeches.Special features: new interview with Steve Martin.The stand-up specials -- The NBC specials -- Bits and pieces.Martin, Steve,1945-Stand-up comedy.American wit and humor.Documentary television programs.lcgftTelevised performances.lcgftTelevised stand-up comedy routines.lcgftSherwood, Mary.Martin, Steve,1945-Shout! Factory (Firm)djn 201210179786314766429 (BT)hC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01327cim a2200361Ia 4500ocn181169841OCoLC20130614123748.0sd fsngnn|||eu071114s2007 nyu n b eng d978074356972907435697250-7435-6972-5Audioworks(OCoLC)181169841DV1DV1BAKEROCLCQUtOrBLWZ26ACD B MARTINMartin, Steve,1945-Born standing up[sound recording] :a comic's life /by Steve Martin.New York :Simon & Schuster,p2007.4 sound discs (4 hr.) :digital ;4 3/4 in."Unabridged"--Container.Compact discs.Read by the author.Martin reflects on his over 30 years in the comedy business, from writing to performing and everywhere in between. He offers listeners a once-in-a-lifetime opportunity to look into the mind of an artist at the top of his game.Martin, Steve,1945-EntertainersUnited StatesBiography.Audiobooks.lcgft.b2418043904-21-1004-03-10rn04-03-10mnnengnyu01220578C0Z26$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01782pam a22004214a 4500ocm71790035 OCoLC20071108125905.0060921s2006 caua e b 100 0 eng 20060313641597140570 (pbk. : alk. paper)9781597140577 (pbk. : alk. paper)(OCoLC)71790035DLCDLCYDXBAKERBTCTAYDXCPNTGUtOrBLWNTGATR820.5.I58 2006779.09222779.092 INTInternational Indigenous Photographers Conference(2006 :Davis, Calif.)Our people, our land, our images :international indigenous photographers /edited by Hulleah J. Tsinhnahjinnie and Veronica Passalacqua.Berkeley, Calif. :Heyday Books,c2006.xxiii, 71 p. :ill. (some col.) 
;26 cm.Product of the International Indigenous Photographers Conference held in April 2006 at the University of California, Davis and the associated exhibit shown at the university's C.N. Gorman Museum..Includes bibliographical references.Documentary photographyExhibitions.Indigenous peoples in artExhibitions.Rites and ceremoniesExhibitions.Portrait photographyExhibitions.EthnologyExhibitions.Tsinhnahjinnie, Hulleah.Passalacqua, Veronica.C.N. Gorman Museum.Table of contents onlyhttp://www.loc.gov/catdir/toc/ecip071/2006031364.html.b2148217208-31-1011-08-07bebofwre12-19-07ma-engcau00khC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01039cam a2200337Ia 4500ocn816033399OCoLC20130318071805.0121104s2012 is e 000 0 heb d97896551223059655122301(OCoLC)816033399WEINBWEINBHLSOCLCOCOOPULNTGUtOrBLWNTGABF575.L7P45 2012ON ORDER880-01Pelled, Esther.880-02Le-orah ha-tsaḥ shel ha-metsiʼut /Ester Peled.Title on t.p. verso:In the Clear Light of Reality880-03Tel Aviv :Bavel,2012.226 p. ;22 cm.Loneliness.Psychoanalysis.Love.IsraelTel Aviv.100-01/rפלד, אסתר.245-02/rלאורה הצח של המציאות /אסתר פלד.260-03/rתל אביב :בבל,2012.aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02084cgm a2200565Ia 4500ocm60523483 OCoLC20130522202021.0vd cvaizu050531t20051993xxu105 e vleng d14157092549781415709252841887050159705015PBS Home Video(OCoLC)60523483EOWEOWVP@BTCTASTFOCLCQIAZUtOrBLWengengn-us---NTGABV3785.G69C78 1993262.2092269/.2/09222DVD B GRAHAMCrusade[videorecording] :the life of Billy Graham /produced by Cutting Edge Productions and WTTW/Chicago ; producer/director, Julian Norridge.Widescreen.[United States] :PBS Home Video,[2005], c1993.1 videodisc (ca. 105 min.) :sd., col. ;4 3/4 in.DVD; widescreen; Dolby digital.Closed-captioned.Narrated by Peter Graves.Executive producer, Randall Balmer.Not rated.Billy Graham has preached to more people than any other person in history. This program takes the viewer behind the scenes to meet Billy Graham himself and witness the power of the Graham organization as it mobilizes followers far and wide.Special feature: Personal visit with Billy Graham.Graham, Billy,1918-EvangelistsUnited StatesBiography.Documentary films.lcgftBiographical films.lcgftReligious films.lcgftVideo recordings for the hearing impaired.lcgftNorridge, Julian.Graves, Peter,1926-2010.Cutting Edge Productions.WTTW (Television station : Chicago, Ill.)PBS Home Video.cme2013-05-229786309100924 (BT)hC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02741cgm a2200613Ka 4500ocm70592076 OCoLC20130614123748.0vd cvaizs060718p20061988cau320 e vleng d14198343209781419834325794051260123E2601Warner Home Video(OCoLC)70592076Midwest Tapehttp://www.midwesttapes.comTEFMTengTEFMTRCSVP@OCLCQOCLCAOCLCQUtOrBLWe-fr---NTGAPN1992.77.A456 2006791.45/7522DVD FIC A SEASON 5 PT.2'Allo 'allo (Television program).Season 5.Selections.'Allo 'allo!The complete series five part deux[videorecording] /produced and directed by David Croft ; written by Jeremy Lloyd & David Croft.Allo 'allo!The complete series five part twoAllo 'allo!The complete series 5 part 2Burbank, CA :Warner Home Video,[2006]2 videodiscs (ca. 320 min.) :sd., col. 
;4 3/4 in.DVD, region 1, full screen (4:3) presentation.Closed-captioned.Title from container.Gorden Kaye, Richard Marner, Carmen Silvera, Kim Hartman, Vicki Michelle.Originally broadcast on the BBC, 1988-89.MPAA rating: Not rated.René Artois is a man on the run. Acting as a pawn for the French Resistance, he spends his days running from the Gestapo, the Generals, and naturally, from all of the women who desperately yearn for his affection.The reluctant millionaires -- A duck for launch -- The exploding bedpan -- Going like a bomb -- Money to burn -- Puddings can go off -- Landmines for London -- Flight to Geneva -- Train of events -- An enigma variation -- Wedding bloss -- Down the drain -- All in disgeese.World War, 1939-1945FranceDrama.World War, 1939-1945Underground movementsFranceDrama.Man-woman relationshipsDrama.Television programsGreat Britain.lcgftComedy television programs.gsafdVideo recordings for the hearing impaired.lcgftComedyTelevision series.migfgCroft, David,1922-2011.Lloyd, Jeremy.Kaye, Gorden,1941-Marner, Richard,1921-2004.Silvera, Carmen,1922-2002.BBC Video (Firm)Warner Home Video (Firm)jab2013-05-31hC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02871njm a2200577Ia 4500ocn841264227OCoLC20130603174833.0g--- vlsd fungnn|m|edvd cvaizu130430s2005 ch ppnne d n chi d4710841050251PKXR0002TNT(OCoLC)841264227TEFTEFUtOrBLWchichia-ch---NTGAM1807.18.D46D36 2005782.4216/3095124/923CD PFE DEN D02 EAST ASIA880-01Deng, Lijun,1953-1995.880-02Dan dan you qing + shen qing jing xuan[sound recording] /Deng Lijun [Teresa Teng].880-03Deng Lijun dan dan you qing + shen qing jing xuan880-04Taibei Shi :Deng Lijun wen hua shi ye you xian gong si fa xing,2005.1 sound disc :digital ;4 3/4 in. +1 videodisc (4 3/4 in.)Sung in Chinese (Mandarin).Title from disc label.880-05Deng Lijun, vocals ; with various instrumental acc.880-06Zhi zuo, TNT Production Ltd.Compact disc.Lyrics inserted in container.880-07CD.Du shang xi lou --Dan yuan ren chang jiu --Ji duo chou --Fang cao wu qing --Qing ye you you --You shui zhi wo ci shi qing --Yan zhi lei --Wan ye qian sheng --Ren yue huang hun hou --Xiang kan lei yan --Yu shuo hai xiu --Si jun. xin shou lu dan qu:Tian shang tian wai tian wu ya --Bu zhao hen ji --Xiang gu shi ban wen rou --Ming yun zhi chuan --Wu ye wei feng.880-08DVD.Du shang xi lou --Si jun.Popular musicTaiwan1981-1990.Popular musicTaiwan1991-2000.Songs, ChineseTaiwan.Chinese language music recordingsMandarin.local100-01鄧麗君,1953-1995.245-02淡淡幽情+深情精選[sound recording] /鄧麗君 [Teresa Teng].246-03鄧麗君淡淡幽情 + 深情精選260-04台北市 :鄧麗君文化事業有限公司發行,2005.511-05鄧麗君, vocals ; with various instrumental acc.508-06製作, TNT Production Ltd.505-07CD.獨上西樓 --但願人長久 --幾多愁 --芳草無情 --清夜悠悠 --有誰知我此時情 --胭脂淚 --萬葉千聲 --人約黃昏後 --相看淚眼 --欲說還休 --思君. 新收錄單曲:天上天外天無涯 --不著痕跡 --像故事般溫柔 --命運之川 --午夜微風.505-08DVD.獨上西樓 --思君.cme2013-06-03jC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01502nam a2200397Ia 4500ocn828672220OCoLC20130522140351.0130228s2012 le a e 000 0 ara d97899532644489953264449(OCoLC)828672220MCCPPMCCPPNTGUtOrBLWarafreNTGA158.123158.1 KUZ ARABIC880-01Kuzmā, Hayyām.Savoir dire non aux autres et oui à soi-même: principes premiers de toute vie épanouie.Arabic880-02Kayfa aqūl la lil-akharīn wa-naʻam la-dhātī :al-Sabīl ila ḥaȳah haniʼat /Hayyām Kuzmā.880-03Bayrūt :Hāshīt Anṭuwān,2012.156 p. :ill. ;21 cm.In Arabic.How to say no to others and yes to yourself. 
A way to live a happy life.Self-acceptance.Self-confidence.Assertiveness (Psychology)Self-esteem.Arabic language editionNonfiction.local100-01/rكوزما, هيام.245-02/rكيف أقول لا للآخرين و نعم لذاتي :السبيل إلى حياة هنيئة /هيام كوزما.260-03/rبىروت :حاشىت أنطوان,2012.jab2013-05-22aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01644cam a2200409Ia 4500ocn719365709OCoLC20130517155904.0110502s2002 ru a e d 000 0 rusod9785170671557 (AST)5170671555 (AST)9785271278747 (Astrelʹ)5271278743 (Astrelʹ)(OCoLC)719365709CLECLECLEORXNTGUtOrBLWrusengNTGACB156.S573717 2009001.94 SIT RUSSIANSitchin, Zecharia.Lost book of Enki.Russian880-01Poteri︠a︡nnai︠a︡ kniga Ėnki :vospominanii︠a︡ i prorochestva Nazemnogo Boga /Zakharii︠a︡ Sitchin ; [perevod s angliĭskogo O. V. Gorshunovoĭ].880-02Moskva :AST :Astrelʹ,c2009.350 p. :ill. ;21 cm.In Russian.Translation of: Lost book of Enki.Includes glossary.Civilization, AncientExtraterrestrial influences.Lost booksHistory.Russian language editionNonfiction.local245-01Потерянная книга Энки :воспоминания и пророчества Наземного Бога /Захария Ситчин ; [перевод с английского О. В. Горшуновой].260-02Москва :АСТ :Астрель,c2009.700-00Горшуновой, О. В.st2013-05-17aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01193nas a2200385Ka 4500ocn843202227OCoLC20130614123749.0130516c20139999waubr p 0 a0pan d(OCoLC)843202227NTGNTGUtOrBLWpanengNTGAPER INDIndia-USA.India-USA.India-USA connecting communitiesKent, WA :India-USA Magazine,2013-v. :ill. ;26 cm.BimonthlyIn Punjabi and English.Description based on: April-May 2013.HealthPeriodicals.MedicinePeriodicals.Self-care, HealthPeriodicals.Panjabi languagePeriodicals.Panjabi-English language editionNonfiction.localEnglish-Panjabi language editionNonfiction.localAasra Punjabi English magazine.(OCoLC)70160664$6.00 // 6(CR) 5-13 NO PUB INFO FOUND 5-13 2013 APR/MAY 5-23-13 LB//st2013-05-16bC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01677nam a2200421Ma 4500ocn781674585OCoLC20130530150937.0120326s2012 ja e 000 0 jpn d97847612682204761268220(OCoLC)781674585TRCLSTRCLSNTGUtOrBLWjpnengNTGA158.1 SIE JAPANESE880-01Siebold, Steve.177 mental toughness secrets of the world class.Japanese880-02Ichiryū no hito ni manabu jibun no migakikata :zenbei kusshi no chōninki seminā kōshi ga denju suru jūni no seichō hōsoku /Siebold Steve;Yumiba Takashi.English title on cover :177 mental toughness secrets of the world class880-03Tōkyō :Kankishuppan,2012.237 p. ;20 cm.In Japanese.Translation of: 177 mental toughness secrets of the world class. SuccessPsychological aspects.Success.Mental efficiency.Psychology, Applied.Japanese language editionNonfiction.local880-04Yumiba, Takashi.100-01スティーブ・シーボルド.245-02一流の人に学ぶ自分の磨き方 :全米屈指の超人気セミナー講師が伝授する12の成長法則 /スティーブ・シーボルド著;弓場隆訳.260-03東京 :かんき出版,2012.700-04弓場隆.st2013-05-30aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01555cam a2200469 a 4500ocn841177098OCoLC20130524002337.0121205s2012 ch a e b 000 0 chi d 201248837397895705271249570527129(OCoLC)841177098TEFengTEFDLCNTGUtOrBLWlccopycata-cc---NTGABF1714.C5S457 2012133.5/925123133.59251 SHE CHINESE880-01Sheng xiao dong wu zhi tan /Zhang Zhijie zhu bian.880-02Chu ban.880-03Taibei Shi :Taiwan shang wu yin shu guan gu fen you xian gong si,2012.viii, 209 p. :ill. 
;22 cm.880-04Shang wu ke pu guan ;12Includes bibliographical references.Astrology, Chinese.AnimalsChinaFolklore.Animals and civilizationChina.Chiese language editionNonfiction.local880-05Zhang, Zhijie.880-06Shang wu ke pu guan ;12.245-01生肖動物摭談 /張之傑主編.250-02初版.260-03臺北市 :臺灣商務印書館股份有限公司,2012.490-04商務科普館 ;12700-05張之傑.830-06商務科普館 ;12.csr2013-05-23aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01513cam a2200397Ia 4500ocn829392997OCoLC20130605182624.0130308s2012 ko a e 000 0 kor d97889605119728960511978(OCoLC)829392997MCCPPengMCCPPMCCPPUtOrBLWNTGA306.8723306.87 CHO KOREAN880-01Choe, Kwang-hyŏn.880-02Kajok ŭi tu ŏlgul =Two faces of my family : sarang hajiman sangchŏdo chugobannŭn nawa kawa kajok ŭi simriterapi /Choe Kwang-hyŏn chiŭm.Two faces of my family880-03Sarang hajiman sangch'ŏdo chugobannŭn nawa kawa kajok ŭi simriterapi880-04Chopan.880-05Sŏul :Puk'i,2012.261 p. :ill. ;21 cm.In Korean.FamiliesFamily relationships.Family crises.Korean language editionNonfiction.local100-01최광현.245-02가족의 두 얼굴 =Two faces of my family : 사랑하지만 상처도 주고받는 나와 가족의 심리테라피 /최광현 지음.246-03사랑하지만 상처도 주고받는 나와 가족의 심리테라피250-04초판.260-05서울 :부키,2012.jab2013-05-22aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02380cam a2200385Ia 4500ocm37920876 OCoLC20130614123749.0971111s1997 cau e 000 0 eng d 97070806 0898706408 (pbk.)()1819412BJOBJOMNJNTGOrPssOrLoB-BWaOLNUtOrBLWenggerNTGABX1751.2.R3413 1997282.092 RATRatzinger, Joseph.Salz der Erde.EnglishSalt of the earth :Christianity and the Catholic Church at the end of the millennium /Joseph Ratzinger ; an interview with Peter Seewald ; translated by Adrian Walker.Subtitle on cover:Church at the end of the millenniumSan Francisco :Ignatius Press,c1997.283 p. ;21 cm.Translation of Salz der Erde.The Catholic Faith; Words and Signs --Pt. 1.Personal Biography.Background and Vocation.The Young Professor.Bishop and Cardinal.The Prefect and His Pope --Pt. 2.Problems of the Catholic Church.Rome under Fire.On the State of the Church.The Situation in Germany.Causes of the Decline.The Mistakes of the Church.The Canon of Criticism --Pt. 3.On the Threshold of a New Era.Two Thousand Years of Salvation History - and Still No Redemption?Catharsis - A New Millennium - A Time of Testing.A "New Springtime of the Human Spirit" for the Third Millennium.Priorities of the Church's Development.Future of the Church - Church of the Future.The True History of the World.The highest ranking man in the Vatican next to the Pope gives an exclusive interview on the worldwide state of Catholicism to a highly regarded secular German journalist. "Salt of the Earth" explores a host of controversial and difficult issues facing Catholicism and Christianity at the end of the millennium.Ratzinger, JosephInterviews.Catholic ChurchDoctrinesHistory20th century.Catholic ChurchHistory20th century.Christianity20th century.Seewald, Peter..b1096058201-20-1009-01-04au(2)bebufrfwmvresh09-01-04ma-engcau027$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01157cam a22003738a 4500ocn743214099OCoLC20130614123749.0110720s2012 nyuac e b 001 0 eng 20110294379780816081592 (alk. paper)081608159X (alk. 
paper)(OCoLC)743214099DLCengDLCBTCTABDXYDXCPBWXOCLCOXXWELNTGUtOrBLWpccNTGABD331.B5925 201211023110 BOEBoersema, David.The Facts on File guide to philosophy.Reality, religion, and the mind /David Boersema, Kari Middleton.Reality, religion, and the mindNew York :Facts On File,c2012.x, 300 p. :ill., ports. ;24 cm.Includes bibliographical references (p. 278) and index.Reality.Metaphysics.ReligionPhilosophy.Philosophy of mind.Middleton, Kari.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01112cam a2200361 a 4500ocn743214104OCoLC20130116191141.0110720s2012 nyua e b 001 0 eng 20110294929780816084821 (alk. paper)0816084823 (alk. paper)(OCoLC)743214104DLCengDLCYDXBTCTABDXYDXCPBWXOCLCONTGUtOrBLWpccNTGABD161.B54 201212123121 BOEBoersema, David.The Facts on File guide to philosophy.Knowledge, logic, and science /David Boersema, Kari Middleton.Knowledge, logic, and scienceNew York :Facts on File,c2012.xi, 294 p. :ill. ;24 cm.Includes bibliographical references (p. 270) and index.Knowledge, Theory of.Logic.SciencePhilosophy.Middleton, Kari.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01022cam a2200337Ia 4500ocn761724599OCoLC20130614123749.0111121s2011 sp a e 000 0 spa d9788499170978 (pbk.)8499170978 (pbk.)(OCoLC)761724599JQWJQWJQWYDXCPBTCTABDXQBXUtOrBLWNTGABF1779.F4S56 2011133.3/33723133.3337 SHO SPANISHShoki, Futabei.Mejora tu vida con el feng shui /Futabei Shoki.Barcelona [Spain] :Grupo Robin Book,c2011.185 p. :ill. ;14 x 14 cm.Vital ;15In Spanish."Aprende a armonizar la energía que te rodea."--Cover.Feng shui.Spanish language editionNonfiction.localColección Vital ;15.lmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02085cam a2200409Ia 4500ocn820149011OCoLC20130116205611.0121126s2013 nyuaf e b 001 0 eng d80003116097814391912551439191255(OCoLC)820149011(OCoLC)800031160TOHTOHJAGBTCTAJTHYDXCPACNCGPIFKNTGUtOrBLWNTGABF311.H646 2013158.123153.723153.7 HORHorowitz, Alexandra.On looking :eleven walks with expert eyes /Alexandra Horowitz.1st Scribner hardcover ed.New York :Scribner,2013.vii, 308 p., [4] p. of plates :ill. (some col.) ;24 cm.Includes bibliographical references (p. 267-294) and index.On Looking begins with inattention. It is about attending to the joys of the unattended, the perceived 'ordinary.' Horowitz encourages us to rediscover the extraordinary things that we are missing in our ordinary activities. Even when engaged in the simplest of activities like taking a walk around the block, we pay so little attention to most of what is right before us that we are sleepwalkers in our own lives.Amateur eyes --Inanimate city : the material of the landscape. Muchness ;Minerals and biomass ;Minding our Qs ;Into the fourth dimension --Animate city : everything that won't stand still. Flipping things over ;The animals among us ;A nice place (to walk) ;The suggestiveness of thumb-nails --Sensory city : things that hum, smell, or vibrate. 
Seeing/not seeing ;The sound of parallel parking ;A dog's-nose view ;Seeing it.Self-consciousness (Awareness)Self-actualization (Psychology)Perception.Cognition.Awareness.Environmental psychology.edl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01180cam a22003493 4500ocn774493028OCoLC20130614123749.0120128s2012 sp e 000 0 spa d9788497778121 (pbk.)849777812X (pbk.)(OCoLC)774493028BTCTAengBTCTAYDXCPBDXH7HUUCNTGUtOrBLWspaengNTGABL624.M86314 2012154.222154.2 MUR SPANISHMurphy, Joseph.Maximize your potential through the power of your subconscious mind for health and vitality.SpanishMaximice su potencial mediante el poder de su mente subconsciente para generar salud y vitalidad /Joseph Murphy ; [traducción, David M. George].Salud y vitalidad1. ed.Barcelona :Ediciones Obelisco,2012.194 p. ;21 cm.New Thought.HealthPsychological aspects.Spanish language editionNonfiction.localcsr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01468cam a22003618a 4500ocn771057345OCoLC20130116163137.0120104s2012 nyua e 000 0 eng 2011052228016086892Uk97814555165991455516597(OCoLC)771057345DLCengDLCBTCTABDXYDXCPZS3UKMGBNTGUtOrBLWpccNTGABF697.5.S43B53 2012158.123158.1 BLA Black, Christa.God loves ugly :& love makes beautiful /Christa Black.1st ed.New York, NY :FaithWords,2012.xii, 205 p. :ill. ;22 cm.The love bucket :(I'm still here) --Sticks and stones :(God loves ugly) --Puddle jumping :(Black Monday) --There's always a reason : (the grass is always greener) --Jesus/Steve : (feel so good) --Monsters : (drift away) --Forty days and a horse :(California sunshine) --Lessons in love : (lonely) --The destination : (the view).Examines the author's battle with self-loathing and depression, and how her belief in God's unconditional love allowed her to love herself as well.Self-perception.Self-perceptionReligious aspects.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00865nam a2200313Ia 4500ocn813166496OCoLC20130614123749.0121016s2012 nyu e 001 0 eng d97816144817821614481784(OCoLC)813166496JRGJRGNTGUtOrBLWNTGA158.1 KOBKobrin, Neil.Emotional well-being :embracing the gift of life /Neil Kobrin.New York :Morgan James Publishing,2012.246 p. ;23 cm.Attention.Stress management.Awareness.Self-actualization (Psychology)Mindfulness-based cognitive therapy.Well-being.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01984cam a2200313Ka 4500ocn794709257OCoLC20130614123749.0120602s2013 nyu e 000 0 eng d9781614483212 (pbk.)1614483213 (pbk.)9781614483229 (ebook)(OCoLC)794709257BTCTAengBTCTAYDXCPNTGUtOrBLWNTGA158.123158.1 MALMalouf, Gregory Nicholas.Silent :the power of silence /Gregory Nicholas Malouf.New York :Morgan James Pub.,2013.xxiii, 265 p. ;23 cm."All too many of us create an outer persona or ego self in order to cope with the fear and anxiety we feel on a daily basis. Our ego self normalizes these intense emotions and stops us from feeling anything at all as we drive ourselves towards our goals of financial status and success. In his book, Silent, Gregory Nicholas Malouf asks that we start to recognize what we feel and in doing so, face our truth. 
Once we are able to acknowledge, accept and forgive ourselves for being afraid, we are able to stop running away, live in the present and so create the life we most desire. In the spirit of Eckhart Tolle and Marianne Williamson, Malouf, founder of Epsilon Healing Academy, frankly reveals his journey from an abusive childhood that will shock you and move you, to his empty existence as a wealthy and highly successful entrepreneur living 'the dream'. A real account of life at its worst and the lessons Malouf learnt to transform it to life at its best, to reconnect with your true, inner self and find the power of the silence within to lead the life you most desire"--Amazon.com.Malouf, Gregory Nicholas.Self-actualization (Psychology)Identity (Psychology)slh2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01300cam a2200349Ia 4500ocn761313353OCoLC20130116224609.0111117s2008 sp a e 000 0 spa d7096826739788496707542 (pbk.)8496707547 (pbk.)(OCoLC)761313353(OCoLC)709682673WIVengWIVQBXBTCTAYDXCPBDXOCLCQOCLCOKNJUtOrBLWspaitaNTGAHM1106.F35 2008158.2 FAL SPANISH 2008Falworth, Omar.El arte de-- vivir bien con los demas :sencillos pero eficaces consejos para los que quieren conocer a los demás, estar bien con todos y tener verdaderos amigos /Omar Falworth ; [traducción de la edición italiana: Julieta Carmona Lombardo].Vivir bien con los demas3a ed.Barcelona :Terapias Verdes,2008.1 v. (unpaged) :ill. ;22 cm.Edición original: L'arte di-- vivere bene con gli altri.Interpersonal relations.Spanish language editionNonfiction.localCarmona Lombardo, Julieta.lmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01336cam a2200349Ma 4500ocn794592518OCoLC20130116174415.0120531s2012 mx a e b 000 0 spa d60711078819786071107886(OCoLC)794592518YDXCPYDXCPBTCTABDXTXBXLOCLCOMLYJQMSJPUtOrBLWNTGA305.4223158 VAR SPANISHVargas de González Carbonell, Gabriela.Soy mujer, soy invencible, y estoy exhausta! /Gaby Vargas.México :Punto De Lectura,2012.411 p. :ill. ;19 cm.Includes bibliographical references.Provides advice for women, based on the author's experiences and knowledge of Mexican women, about balancing relationships, work, family life, health, and personal development.Contenido: Yo con mi pareja --Yo y mi trabajo --Yo con mi familia --Yo y mi salud - Yo conmigo mismaWomenPsychology.WomenSocial conditions.Interpersonal relations.Social sciencesSociology.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01096cam a2200361 a 4500ocn743214105OCoLC20130116201107.0110720s2012 nyua e b 001 0 eng 20110294939780816084838 (alk. paper)0816084831 (alk. paper)(OCoLC)743214105DLCengDLCYDXBTCTABDXYDXCPBWXNTGUtOrBLWpccNTGABJ1012.B56 201217023170 BOEBoersema, David.The Facts on File guide to philosophy.Values and the good life /David Boersema, Kari Middleton.Values and the good lifeNew York :Facts On File,c2012.x, 308 p. :ill. ;24 cm.Includes bibliographical references (p. 
280) and index.Ethics.Political sciencePhilosophy.Aesthetics.Middleton, Kari.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01587cam a2200361 a 4500ocn741751372OCoLC20130116184237.0110714s2012 nyua e b 001 0 eng 2011029436 201101686497808160815850816081581(OCoLC)741751372DLCengDLCYDXBDXYDXCPBWXOCLCOWSONTGUtOrBLWpccNTGAB72.B66 201219023190 BOEBoersema, David.The Facts on File guide to philosophy.History of Western philosophy /David Boersema, Kari Middleton.History of Western philosophyNew York :Facts on File,c2012.xi, 426 p. :ill. ;24 cm.Includes bibliographical references (p. 400) and index.Introductory discussion questions --The Greek miracle --Elements and the nature of change --Heraclitus and change --Changelessness and mathematics --Greek Atomism --Sophists and Socrates --Plato --Aristotle --Hellenistic philosophy --Medieval and Renaissance philosophy --Concluding discussion questions.Offers a historical introduction to Western philosophy, describing notable philosophers and movements such as Aristotle, Socrates, existentialism, analytic philosophy, and logical positivism.PhilosophyHistory.Middleton, Kari.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01361cam a22003258a 4500ocn779266328OCoLC20130116221114.0120319s2012 nyu e 000 0 eng 201200866297814516090281451609027(OCoLC)779266328DLCengDLCBTCTABDXOCLCOYDXCPBWXNTGUtOrBLWpccNTGABT752.R565 201224323243 ROLRollins, Peter.The idolatry of God :breaking our addiction to certainty and satisfaction /Peter Rollins.New York :Howard Books,2012.viii, 208 p. ;21 cm.The church shouldn't do worship, the charts have it covered --On not getting what you want, and liking it --Hiding behind the mask that we are --Be part of the problem, not the solution --Trash of the world --The fool says in his heart, "There is knowing god" --I need your eyes in order to see myself --Destroying Christianity and other Christian acts --Want to lose belief?: join the church --Conclusion: a faith full of signs and wonders.SalvationChristianity.Christian life.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02107cam a22003858a 4500ocn788298783OCoLC20130116212531.0120614s2012 inu e b 000 0aeng 20120221789781594712920 (pbk.)1594712921 (pbk.)9781594713507 (e-book)1594713502 (e-book)(OCoLC)788298783DLCengDLCBTCTAOCLCOBDXORXNTGUtOrBLWpccNTGABX4705.H3337A3 2012248.2/42092B23248.242092 HAWHaw, Chris,1981-From Willow Creek to Sacred Heart :rekindling my love for Catholicism /Chris Haw.Notre Dame, Ind. :Ave Maria Press,c2012.xix, 234 p. ;22 cm.Includes bibliographical references (p. 211-234).Action(with some contemplation) --From mass to megachurch --From class to streets --From streets to jungle --From jungle to war --From war to concrete jungle --Contemplation(with some action) --Murder and the mass --Pagan Christianity --Search for no accent: or, the impossibility of nondenominationalism --On being part of a terrible organization: or, how to treat the church like a (dysfunctional) family --Art and apocalypse.All Christians believe the same thing, right? Haw, co-author of Jesus for President, would whole-heartedly disagree. But he expresses that disagreement with a refreshing dose of respect, objectivity, and love. 
The author is an expert at describing the beauty and edifying character of both evangelical Christianity and Roman Catholicism, as well as the numerous challenges they face. After a number of years immersed in Protestantism, he ultimately chooses to return to the Catholic Church into which he was born.Haw, Chris,1981-ConversionCatholic Church.Christian biography.edl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01215cam a2200373Ia 4500ocn819324712OCoLC20130116230236.0121126s2011 flu e b 000 0 spa d97807899170410789917041(OCoLC)819324712HT#HT#BDXUtOrBLWspaengNTGABV4627.A5 A548 2011248.4 AND SPANISHAnderson, Neil T.,1942-Getting anger under control.SpanishControla tu ira /Neil T. Anderson y Rich Miller.1a ed.Miami, FL :Editorial Unilit,c2011.248 p. ;21 cm.Text in Spanish."Vence: el resentimiento sin resolver, las emociones abrumadoras, las mentiras detrás de la ira."Translation of: Getting anger under control.Includes bibliographical references (p. 243-248).AngerReligious aspectsChristianity.Spanish language editionNonfiction.localMiller, Rich,1954-lmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01410cam a2200361 a 4500ocn783138158OCoLC20130116221955.0120418s2012 miua e b 000 0 eng 20120132829780310494843 (hardcover)0310494842 (hardcover)(OCoLC)783138158DLCengDLCYDXBTCTABDXYDXCPBWXNTGUtOrBLWpccNTGABV4647.H67S73 201225323253 STAStanley, Andy.Deep & wide :creating churches unchurched people love to attend /Andy Stanley.Deep and wideGrand Rapids, Mich. :Zondervan,c2012.350 p. :ill. ;23 cm.Includes bibliographical references (p. 347-350).Author and pastor Andy Stanley draws from Scripture and over 25 years of pastoral experience to communicate to church leaders how they can create a church where believers can have a growing faith in Jesus and at the same time unbelievers can make a vital and lasting connection---a ministry that is both deep and wide.HospitalityReligious aspectsChristianity.Non-church-affiliated people.Church marketing.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01021nam a2200337Ia 4500ocn810436331OCoLC20130116234631.0120921s2011 ve c e 000 0 spa d97898036908239803690825(OCoLC)810436331HT#HT#NTGUtOrBLWNTGA299.93 MEN SPANISH 2011Méndez, Conny.Piensa lo bueno y se te dará /Conny Méndez.2d ed.Caracas, Venezuela :Ediciones Giluz :Bienes Lacónica :Distribuidora Gilavil,2011.111 p. :ill. ;20 cm.Colección metafísicaNew Age movementReligion.Occultism.Metaphysics.Spiritual lifeNew Age movement.Spanish language editionNonfiction.localColección Metafísica.csr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01635cam a2200373 a 4500ocn779266207OCoLC20130116181755.0120316s2013 nyu e 000 0aeng 20120107319781439190586 (hbk.)1439190585 (hbk.)9781439190616 (ebk.)1439190615 (ebk.)(OCoLC)779266207DLCengDLCIG#BTCTABDXYDXCPGK8ILCNTGUtOrBLWNTGACT275.S5918A3 2013302.34092B23302.34092 SONSonnenberg, Susanna,1965-She matters :a life in friendships /Susanna Sonnenberg.1st Scribner hardcover ed.New York :Scribner,2013.vii, 255 p. ;24 cm.Illuminates the friendships that have influenced, nourished, inspired, and haunted the author--and sometimes torn her apart--each of which has its own lessons that she seeks to understand.She matters --Young. 
Women are like this ;Real friends ;Facebook ;Proctor duties ;The root cellar --Aware. Roommate ;Homesick ;Annabelle upstairs ;Blind date ;Evidence ;Within reach ;Kindling --Awake. We turn into mothers ;Orphan girl ;Naked ;Boundaries ;Ritual ;Real estate ;The four seasons ;As we both know.Sonnenberg, Susanna,1965-Sonnenberg, Susanna,1965-Friends and associates.Female friendship.edl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01814cam a2200397Ia 4500ocn747529314OCoLC20130116225139.0110818s2011 mx e 000 0 spa d97860748017816074801789(OCoLC)747529314BTCTAengBTCTAYDXCPEXRGPIH7HUtOrBLWspaengNTGAF1221.Y3C373718 2011306.092 WOL SPANISHWolf, Felix.Art of navigation.SpanishLas enseñanzas de Carlos Castaneda :aprendizaje y espiritualidad /Felix Wolf ; [Verónica Gerber Bicecci por la [traducción].1a. ed. en México.México, D.F. :Vergara,c2011.335 p. ;23 cm.Text in Spanish.Titulo original: The art of navigation : travels with Carlos Castaneda and beyond. Originally published: San Francisco : Council Oak Books, 2010."Millenium"--Added t.p. and p. [4] of cover.Con fidelidad, respeto y humildad por la vida, Las enseñanzas de Carlos Castaneda us un libro que comparte una visión distinta al actual sentimiento de hartazgo y desesperanza que prepondera en nuestra sociedad, brindándonos la oportunidad de maravillarnos por el simple hecho de ser día a día seres de luz capaces de fortalecer nuestro cuerpo y espíritu con la bendición de la tierra, y lo que hay en ella.Castaneda, Carlos,1931-1998Criticism and interpretation.Yaqui IndiansReligion.Shamanism.Nagualism.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01103cam a2200337Ia 4500ocn767569919OCoLC20130116210527.0111203s2011 sp a e b 000 0 spa d9788441530218 (pbk.)8441530211 (pbk.)(OCoLC)767569919BTCTAengBTCTAYDXCPBDXC3LKNJNTGUtOrBLWNTGA302.23123323.445 BER SPANISHBergareche Sainz de los Terreros, Borja,1977-Wikileaks confidencial /Borja Bergareche Sainz de los Terreros ; prólogo de Rosental Calmon Alves.Madrid, Spain :Anaya Multimedia,2011.213 p. :ill. ;23 cm.800 booksIncludes bibliographical references (p. 207-213)WikiLeaks (Organization)Assange, Julian.News Web sites.Freedom of information.Spanish language editionNonfiction.localcsr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01634cam a2200349Ia 4500ocn785079137OCoLC20130116202802.0120407r20122011nyuaf e b 001 0 eng d97816059839811605983985(OCoLC)785079137BTCTAengBTCTABDXYDXCPJAGOCLCOB@LPATCO2BURNTGUtOrBLWNTGAUB251.G7C67 2012327.124100923327.1241 CORCorera, Gordon.The art of betrayal :the secret history of MI6 /Gordon Corera.New York :Pegasus Books,c2012.481 p., [16] p. of plates :ill. ;24 cm.Originally published: London: Weidenfeld & Nicolson, 2011.Includes bibliographical references (p. 414-461) and index.From Berlin to the Congo, from Moscow to the back streets of London, these are the stories of the agents on the front lines of British intelligence. 
And the truth is often more remarkable than fiction.Into the shadows : life and death in Vienna --The cost of betrayal --A river full of crocodiles : murder in the Congo --Moscow rules --The wilderness of mirrors --Compromising situations --Escape from Moscow --The Afghan plains --Out of the shadows --In the bunker.Great Britain.MI6History20th century.Intelligence serviceGreat BritainHistory20th century.SpiesGreat Britain.edl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02607cam a2200373 a 4500ocn772099444OCoLC20130116185343.0120509s2013 mau e b 001 0 eng 20120173239780547317274054731727140021729534(OCoLC)772099444DLCengDLCBTCTABDXYDXCPBWXMOFBURABGVP@YUSNTGUtOrBLWpccn-us---n-us-nyNTGAHG4910.W357 2013332.63/2097323332.632097 WEAWeatherall, James Owen.The physics of Wall Street :a brief history of predicting the unpredictable /James Owen Weatherall.Boston :Houghton Mifflin Harcourt,2013.xviii, 286 p. ;24 cm.Includes bibliographical references (p. [250]-268) and index.While many of the mathematicians and software engineers on Wall Street failed when their abstractions turned ugly in practice, a special breed of physicists has a much deeper history of revolutionizing finance. From fin-de-siècle Paris to Rat Pack-era Las Vegas, from wartime government labs to Yippie communes on the Pacific coast, Weatherall shows how physicists successfully brought their science to bear on some of the thorniest problems in economics, from options pricing to bubbles. The 2008 crisis was partly a failure of mathematical modeling, but even more, it was a failure of some very sophisticated financial institutions to think like physicists. Models--whether in science or finance--have limitations; they break down under certain conditions. And in 2008, sophisticated models fell into the hands of people who didn't understand their purpose, and didn't care. It was a catastrophic misuse of science. The solution, however, is not to give up on models; it's to make them better. Weatherall reveals the people and ideas on the cusp of a new era in finance. This book is riveting history that will change how we think about our economic future.--From publisher description.Of quants and other demons --Primordial seeds --Swimming upstream --From coastlines to cotton prices --Beating the dealer --Physics hits the street --The prediction company --Tyranny of the Dragon King --A new Manhattan project --Send physics, math, and money!SecuritiesUnited States.Wall Street (New York, N.Y.)edl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01277cam a2200349Ia 4500ocn757473443OCoLC20130116201544.0111012s2011 sp b e 000 0 spa d97884152561518415256159(OCoLC)757473443BTCTAengBTCTAYDXCPBDXQBXTXGNTGUtOrBLWspafreNTGAQ172.5.E77B3818 201150023500 BAU SPANISHBaudet, Jean(Jean C.)Curieuses histoires de la science.SpanishNuevas historias curiosas de la ciencia :[relatos fascinantes sobre descubrimientos fundamentales y errores imperdonables] /Jean C. Baudet ; traducción de Eva Jiménez Julià.Teià, Barcelona :Ma Non Troppo,c2011.235 p. :ports. 
;23 cm.Subtitle from cover.Translation of: Curieuses histoires de la science : quand les chercheurs se trompent.Errors, ScientificHistoryPopular works.ScienceHistoryPopular works.Spanish language editionNonfiction.localcsr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01627cam a2200409Ia 4500ocn761850659OCoLC20130614123750.0111117s2011 sp a e b 001 0 spa d97884807692978480769297(OCoLC)761850659BTCTAengBTCTAYDXCPBDXZP7QBXTXGGPISAPNTGUtOrBLWspafreNTGAQE862.D5M5218 2011567.922567.9 MIC SPANISHMichard, Jean-Guy.Monde perdu des dinosaures.SpanishEl mundo perdido de los dinosaurios /Jean-Guy Michard ; [traducción, documentación y adaptación, Manuel Pijoan Rotgé].1a ed. en lengua española.Barcelona :Blume,2011.143 p. :ill. (some col.) ;18 cm.Biblioteca ilustrada ;11Descubrir la ciencia y la técnicaTranslation of: Le monde perdu des dinosaures.Elogio de la diversidad --En busca de una identidad --Historia natural de un dinosaurio ordinario --La ciencia puesta en duda --Ser dinosaurio en un mundo en plena mutación.Includes bibliographical references (p. 136) and index.DinosaursPopular works.Pijoan Rotgé, Manuel.Biblioteca ilustrada (Blume Ediciones (Barcelona, Spain)) ;11.Biblioteca ilustrada.Descubrir la ciencia y la técnica.csr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00884cam a2200277Ia 4500ocn768169362OCoLC20130116174706.0111208s2011 mx e 000 0 spa d9786073102698 (pbk.)6073102690 (pbk.)(OCoLC)768169362BTCTAengBTCTAYDXCPBDXJTBUtOrBLWNTGA613.25 COR SPANISHCordoniz, Andréa.Exorciza a tu alma gorda :una guía para cambiar tus hábitos y adelgazar para siempre /Andréa Cordoniz ; traducción Pilar Obón.México, D.F. :Grijalbo,2011.135 p. ;15 cm.Weight loss.Spanish language editionNonfiction.localObón, Pilar.lmc2013-01-03aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01496cam a2200361 a 4500ocn796758568OCoLC20130116220539.0120913s2013 nju e b 001 0 eng 20120300489781118185117 (hbk.)1118185110 (hbk.)9781118235966 (ebk.)1118235967 (ebk.)(OCoLC)796758568DLCengDLCIG#BTCTABDXOCLCOYDXCPWIQNTGUtOrBLWNTGARM222.2.W4513 2013613.2/523613.25 WHYWhyte, John,1953-AARP new American diet :lose weight, live longer /John Whyte.Hoboken, N.J. :John Wiley & Sons,c2013.xiv, 226 p. 
;25 cm.Includes bibliographical references and index.The promise of weight loss and a longer life --The healing power of food --AARP new American diet nutrition basics --You don't have to be overweight --Dealing with emotional eating and cravings --AARP new American diet guidelines --The meal plans: 7, 14, and 30 days --AARP new American diet recipes --Reducing your risk of disease: cancer, diabetes, and heart disease --Eat well, get fit, sharpen your brain.Weight loss.Reducing dietsPsychological aspects.Nutrition.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01288cam a2200397Ia 4500ocn752071979OCoLC20130614123750.0110909s2011 sp a e 001 0 spa d9788425519727 (pbk.)8425519721(OCoLC)752071979BTCTAengBTCTAYDXCPZQPSAPBDXORXUtOrBLWspagerNTGA613.723613.7 GRA SPANISHGrabbe, Dieter.Gute Figur Quickies.SpanishUna buena figura rápido :ponte en forma y mejora tu salud : 10 minutos al día de ejercicios para gente con poco tiempo /Dieter Grabbe ; [traducción, Margarita Gutiérrez].Buena figura rápidoBarcelona :Hispano Europea,c2011.96 p. :col. ill. ;23 cm.Translation of: Gute Figur Quickies.Includes index.Physical fitness.Exercise.Reducing exercises.Health.Weight loss.Spanish language editionNonfiction.localGutiérrez, Margarita.lmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01785cam a2200373Ka 4500ocn756584063OCoLC20130614123750.0111005s2012 cau e b 000 0 spa d97814019394581401939457(OCoLC)756584063BTCTAengBTCTAYDXCPBDXOQXIGANTGUtOrBLWspaengNTGARA776.95.M2618 2012613 MAN SPANISHMancini, Fabrizio,1965-Power of self-healing.SpanishEl poder de la autosanación :libera tu potencial natural de sanación en 21 días /Fabrizio Mancini ; [foreword by Wayne W. Dyer ; traducción, Adriana Miniño].Carlsbad, Calif. :Hay House,c2012.xx, 306 p. ;23 cm.Translation of: The power of self-healing : unlock your natural healing potential in 21 days.Includes bibliographical references.Para la mayoría de nosotros no es extraño tener problemas de salud, enfermedades o dolor. Pero, ¿y si hubiera una solución para restaurar nuestro bienestar que no involucrara fármacos, cirugía u otros procedimientos médicos? Pues bien, la hay, y la encontrarás en el interior de tu propio cuerpo. ¡Asi es! Tu cuerpo tiene integrada la capacidad de sanación: un sistema sorprendente de autoreparación que trabaja día y noche, y está bajo tu control fomentarla. --Publisher.Self-care, Health.Healing.Mind and body therapies.Spanish language editionNonfiction.localMiniño, Adriana.csr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01696cam a2200385Ia 4500ocn798057520OCoLC20130614123750.0120627s2012 flu e 000 0aspa d97816143565851614356580(OCoLC)798057520BTCTAengBTCTABDXGZDAZTOCLCOGPIUtOrBLWspaengn-us---NTGA616.99437 PAU SPANISHPausch, Jai,1966-Dream new dreams.SpanishVolver a soñar :reimaginar la vida después de perder a un ser querido /Jai Pausch.1a ed.Doral, FL :Aguilar,2012.xv, 236 p. 
;21 cm.Título original: Dream new dreams.En Volver a soñar, Jai Pausch narra por primera vez su propia historia: el tránsito emocional de esposa y madre a cuidadora a jornada completa que viajaba incansablemente entre sus tres hijos pequeños y los hospitales en los que Randy era sometido a tratamiento, lejos de su hogar; y posteriormente a viuda y madre sin esposo en lucha constante por mantener el sentido de estabilidad en su familia, y hacerle frente a su propio dolor y a los retos cotidianos del hogar ...Pausch, Jai,1966-Women caregiversUnited StatesBiography.PancreasCancerPatientsFamily relationshipsUnited States.Pausch, RandyHealth.DeathPsychological aspects.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01474cam a2200349 a 4500ocn775406332OCoLC20130116022401.0120822s2013 nyua e b 001 0 eng 20120286929780757003493 (pbk.)0757003494 (pbk.)(OCoLC)7754063321350030QBIDLCengDLCQBXBTCTABDXOCLCOORXVETJO8NTGUtOrBLWNTGARC901.7.H45S64 2013617.4/6105923617.461059 SNYSnyder, Rich.What you must know about dialysis :the secrets to surviving and thriving on dialysis /Rich Snyder.DialysisGarden City Park, NY :Square One Publishers,c2013.vii, 197 p. :ill. ;23 cm.Includes bibliographical references (p. 177-188) and index.For most people, dialysis is an unplanned and uncharted journey. In this book, Dr. Snyder provides you with a powerful beacon to light the path ahead with compassion and with rock-solid answers to your most important questions. This book not only answers common questions about dialysis, but, also, offers practical advice, commonsense strategies and complementary options.HemodialysisPatients.HemodialysisPopular works.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00965cam a2200313Ia 4500ocn801698548OCoLC20130116153058.0120725s2012 sp a e b 000 0 spa d97884255202118425520215(OCoLC)801698548JQWJQWJQWUtOrBLWNTGA636.7088723636.70887 SEW SPANISHSewell, Ken.Adiestramiento que funciona :obediencia básica /Ken Sewell.Obediencia básicaBarcelona :Hispano Europea,c2012.96 p. :col. ill. ;23 cm. +1 videodisc (120 min. : sd., col. ; 4 3/4 in.)In Spanish.Includes bibliographical references (p. 94-95).DogsTraining.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01453cam a2200373Ia 4500ocn796276333OCoLC20130614123750.0120614s2012 enka e d 000 0 eng dGBB259136bnb016106552Uk7882882049780199640249 (hbk.)0199640246 (hbk.)(OCoLC)796276333(OCoLC)788288204UKMGBengUKMGBOCLCOYDXCPBWKYNKBWXEYMORXOCLCOBTCTABDXNTGUtOrBLWNTGATX349.A86 2012641.0323641.03 AYTAyto, John.The diner's dictionary :word origins of food & drink /John Ayto.2nd ed.Oxford :Oxford University Press,c2012.x, 405 p. :ill. ;21 cm."First edition published in 1990 by Routledge as The glutton's glossary; Revised edition publsihed 1993 by arrangement with Routledge as The diner's dictionary; First issued as an Oxford University Press paperback 1994 as The Gourmet's guide; Second edition published 2012 by arrangement with Routledge as The diner's dictionary"--T.p. 
verso.FoodDictionaries.BeveragesDictionaries.CookingDictionaries.Ayto, John.Glutton's glossary.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02196cam a2200481Ia 4500ocn824512563OCoLC20130614123750.0130116s2012 ch a e b 001 0 chi d9789866179341 (v. 1)9866179346 (v. 1)(OCoLC)824512563NTGNTGNTGUtOrBLWchiengNTGA641.523641.5 MCG CHINESE V.1McGee, Harold.Keys to good cooking.Chinese880-01Chu yi zhi yao.Shang =Keys to good cooking: a guide to making the best of foods and recipes /[Haluode.maji (Harold McGee) zhu ; Deng Ziijin yi].Chu yi zhi yao. 1Keys to good cooking: a guide to making the best of foods and recipes880-02Chu ban.880-03Xin bei shi :Da jia chu ban :Yuan zu wen hua fa xing,2012.xvii, 267, xxxv :ill. ;23 cm.In traditional Chinese and some English.Translation of: Keys to good cooking: a guide to making the best of foods and recipes.880-04"Wan quan zhang wo chu fang,wan mei liao li shi cai"--Cover.A one-stop reference answers nearly every kitchen conundrum the home cook may have in a single volume, from equipment and cooking methods to how to handle nearly every ingredient. By the author of On Food and Cooking: The Science and Lore of Kitchen.Includes bibliographical referencesand index.Cooking.Food.Chinese language edition (Traditional)Nonfiction.local880-05Deng, Zijin.245-01廚藝之鑰.上 =Keys to good cooking: a guide to making the best of foods and recipes /[哈洛德.馬基(Harold McGee)著 ; 鄧子衿譯]250-02初版260-03新北市 :大家出版 :遠足文化發行,2012.500-04"完全掌握廚房,完美料理食材"--Cover.700-05鄧子衿,translatorst2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$03263cam a22003978a 4500ocn741542746OCoLC20130116175518.0111213s2013 nyua e b 001 0 eng 201104443797814516556811451655681(OCoLC)741542746DLCengDLCBTCTAYDXCPBDXOCLCOZS3IK2NTGUtOrBLWpccNTGATX837.P67 2012641.5/63623641.5636 PORPorter, Jessica.The MILF diet :let the power of whole foods transform your body, mind, and spirit --deliciously /Jessica Porter.1st Emily Bestler/Atria Books hardcover ed.New York :Atria Books,2013.vii, 360 p. :col. ill. ;24 cm.Includes bibliographical references (p. 347) and indexes."From the acclaimed author of The Hip Chick's Guide to Macrobiotics, the MILF diet is what happens when the Skinny Bitch grows up and gets married. MILF has become a popular slang term for sexually attractive older women. But Jessica Porter wants women to own the term and reunite sexuality with the special gifts of womanhood. One of the quickest routes to becoming a MILF is by eating whole, natural foods and letting go of the processed stuff. As a result, the female body finds its peaceful home again. A MILF feels energized, yet relaxed and clear headed.The MILF Diet is a practical cookbook for women who want to achieve optimal health and happiness. It includes not only seventy-five photographs and recipes but tips for keeping a MILF-y kitchen and food plan, and MILF weight loss. Despite being centered around whole grains, organic seasonal vegetables, sea vegetables, natural sweeteners, and mostly plant-based proteins--while suggesting giving up dairy, white sugar, processed foods, and meat--the diet can be followed by non-vegetarians, vegetarians, and vegans alike. 
By eating the whole, natural and delicious foods of The MILF Diet, a MILF-to-be will not only turn back the clock, gain a spicier sex life and find her inner goddess, she will also strengthen her immune system, experience a newfound energy and reduce her risk of disease. Your DILF won't know what hit him"--Provided by publisher."The MILF Diet is a practical cookbook for women who want to achieve optimal health and happiness. It will include not only 75-100 recipes but tips for keeping a MILF-y kitchen and food plan, MILF weight loss, and MILF beauty tips! Despite being centered around whole grains, organic seasonal vegetables, sea vegetables, natural sweeteners, mostly plant-based proteins, and suggesting giving up dairy, white sugar, processed foods and factory-farmed meat, the diet can be followed by non-vegetarians, vegetarians, and vegans alike"--Provided by publisher.Vegetarian cooking.Natural foods.WomenHealth and hygiene.Cookbooks.lcgftCover imagehttp://www.netread.com/jcusers2/1247/681/9781451655681/image/lgcover.9781451655681.jpgedl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01735nam a2200457Ii 4500ocn798307498OCoLC20130614123750.0120706s2012 ch ap e 000 0dchi d97898661992959866199290(OCoLC)798307498CYTCYTchiCYTNTGUtOrBLWNTGA641.824 WAN CHINESE880-01Wang, Anqi.880-02100 "%"cheng gong!chao jian dan de song bing fen fa shi xian dian :yong song bing fen qing song zuo chu mei ge ren du xi ai de 70 zhong fa shi xing fu dian xin /Wang Anqi zuo.880-03100 cheng gong!chao jian dan de song bing fen fa shi xian dian.880-04Taipei shi :Bang lian wen hua,2012.125 p. :col. ill. ;26 cm.880-05Zi wei guan ;61880-06Wang an qi yong an ni.In traditional Chinese.Pies.Pastry.Quiches (Cooking)Cooking, French.Cookbooks.lcgftChinese language edition (Traditional)Nonfiction.local880-07Zi wei guan ;61.100-01王安琪245-02100%成功!超簡單的鬆餅粉法式鹹點 :用鬆餅粉輕鬆做出每個人都喜愛的70種法式幸福點心 /王安琪作246-03100成功!超簡單的鬆餅粉法式鹹點260-04臺北市 :邦聯文化,2012.490-05滋味館 ;61.500-06王安琪用安妮830-07滋味館 ;61.st2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01130cam a2200337Ia 4500ocn809123806OCoLC20130116201305.0120822s2012 ohua e 001 0 eng d016156395Uk9781440321016 (hbk.)1440321019 (hbk.)(OCoLC)8091238061349552QBIUKMGBengUKMGBOCLCOBDXA6QQBXOQXVP@NTGUtOrBLWNTGATT180.M55 2012684.0823684.08 MILMiller, Jeff,1956-The foundations of better woodworking :[how to use your body, tools and materials to do your best work] /by Jeff Miller.WoodworkingCincinnati, Ohio :Popular Woodworking ;Georgetown, Ont. :Distributed in Canada by Fraser Direct,c2012.192 p. :col. ill. ;29 cm.Subtitle from cover.Includes bibliographical references (p. 188) and index.Woodwork.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01783cam a2200457Ia 4500ocn761854003OCoLC20130614123750.0111118s2011 sp a e b 001 0 spa d97884807693038480769300(OCoLC)761854003BTCTAengBTCTAYDXCPBDXZP7TXGGPIOCLCOSAPNTGUtOrBLWspafree-sp---NTGA720.9223720.92 THI SPANISHThiébaut, Philippe.Gaudí, bâtisseur visionnaire.SpanishGaudí :arquitecto visionario /Phillipe Thiébaut ; [traducción, Marina Huguet Cuevas].1. ed. en lengua españ̃ola.Barcelona :Blume,2011.127 p. :ill. (some col.) 
;18 cm.Biblioteca ilustrada ;7Descubrir el arteTranslation of: Gaudí, bâtisseur visionnaire.Un arquitecto, una ciudad --De Oriente al nacionalismo catalán --Gótico y racionalismo --Hacia una arquitectura orgánica --El templo helénico del gótico mediterráneo.Includes bibliographical references (p. 122-123) and index.Gaudí, Antoni,1852-1926.ArchitectureSpainBarcelona.Architecture, Modern19th century.Architecture, Modern20th century.Spanish language editionNonfiction.localHuguet Cuevas, Marina.Biblioteca ilustrada.Descubrir el arte.Biblioteca ilustrada (Blume Ediciones (Barcelona, Spain)) ;7.csr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01150cam a2200325Ia 4500ocn809534372OCoLC20130116234221.0120910s2012 enka e 000 0 eng d 201194287497814380012581438001258(OCoLC)809534372IMDIMDYDXCPNTGUtOrBLWNTGA741.2 CURCurnow, Vera.Drawing and sketching :expert answers to the questions every artist asks /Vera Curnow.1st U.S. ed.London :Quantum Books ;Hauppage, NY :Barrons Educational Series,2012.224 p. :col. ill. ;20 cm.Art answersIncludes index.Media and application techniques --Other equipment --Basic practices --Tone, color, and composition --Working on location --Landscapes and townscapes --Figures and animals --Still life and flowers --Approaches and style.DrawingTechnique.Art answers.jmhaC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01090cam a2200349Ia 4500ocn788293318OCoLC20130116204308.0120421s2012 moua e 6 000 1 eng d 20129367509781449423063 (pbk.)144942306X (pbk.)(OCoLC)788293318BTCTAengBTCTABDXWIMCPLUtOrBLWn-us---NTGAPN6728.F64A46675 2012741.5697323741.56973 AMEAmend, Bill.FoxTrot.SelectionsJasotron: 2012 :a FoxTrot collection /by Bill Amend.Kansas City, Mo. :Andrews McMeel Pub.,c2012.144 p. :chiefly col. ill. ;21 x 26 cm.Collection of previously published comic strips.FamiliesComic books, strips, etc.American wit and humor, Pictorial.Comic books, strips, etc.United States.djn 2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01278cam a2200361Ia 4500ocn779265290OCoLC20130116204853.0120304s2012 moua e 6 000 1 eng d 20129367409781449424046144942404X(OCoLC)779265290BTCTAengBTCTABDXOEMTXAWIMUtOrBLWNTGA741.56/97323741.56973 TOOToomey, Jim P.Sherman's Lagoon.SelectionsThink like a shark :avoiding a porpoise-driven life : the seventeenth Sherman's Lagoon collection /by Jim Toomey.Avoiding a porpoise-driven lifeKansas City, Mo. :Andrews McMeel Pub.,c2012.127 p. :chiefly ill. (some col.) 
;23 cm.More adventures of Sherman the shark and his friends on Kapupu Island.SharksComic books, strips, etc.Seashore animalsComic books, strips, etc.Marine animalsComic books, strips, etc.American wit and humor, Pictorial.Comic books, strips, etc.United States.djn 2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01234cam a2200409 i 4500ocn788282347OCoLC20130116172401.0120705t20122012waua e b 000 0 eng 2012024301GBB288928bnb016163382Uk8095640059781604680867 (pbk.)1604680865 (pbk.)(OCoLC)788282347(OCoLC)809564005DLCengrdaDLCBTCTABDXOCLCOCDXUKMGBNTGUtOrBLWpccNTGATT835.F547 2012746.4623746.46 FIN Finley, Jill,1956-Home sweet quilt :fresh, easy quilt patterns from Jillily Studio /Jill Finley.Bothell, WA :Martingale,[2012]96 pages :illustrations (chiefly color) ;28 cmtextrdacontentunmediatedrdamediavolumerdacarrierIncludes bibliographical references.PatchworkPatterns.QuiltingPatterns.House furnishings.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02089cam a22003618a 4500ocn785077610OCoLC20130116201040.0121015s2012 nyua e 000 0 eng 201203778297803991616430399161643(OCoLC)785077610DLCengDLCBTCTABDXOCLCOJTHYDXCPVHPABGMOFNTGUtOrBLWNTGAPN1997.B444B75 2012791.43/7223791.4372 BRIBridges, Jeff,1949-The Dude and the Zen master /Jeff Bridges & Bernie Glassman.New York :Blue Rider Press,c2012.272 p. :ill. ;22 cm.Just throw the fu**ing ball, man! Sometimes you eat the bear, and sometimes, well, he eats you ;It's down there somewhere, let me take another look ;Dude, you're being very unDude --The Dude abides and the Dude is not in. Yeah, well, ya know, that's just like, uh, your opinion, man ;Phone's ringin', Dude ;New sh** has come to light --That rug really tied the room together, did it not? You know, Dude, I myself dabbled in pacifism at one point. Not in 'Nam, of course ;You mean coitus? ;What makes a man, Mr. Lebowski? ;What do you do, Mr. Lebowski? ;Nothing's fu**ed, Dude --Enjoyin' my coffee. Sorry, I wasn't listening ;Strikes and gutters, ups and downs ;Some burgers, some beers, a few laughs. Our fu**ing troubles are over, Dude ;Say, friend, ya got any more of that good sarsaparilla?A dialogue between the actor and his long-time spiritual guru explores the challenges of Bridges' Hollywood career and the ways in which Zen teachings have informed his efforts to do good in the modern world.Big Lebowski (Motion picture)Philosophy in motion pictures.Zen BuddhismDoctrines.Conduct of life.Glassman, Bernard(Bernard Tetsugen)edl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00934cam a2200301Ia 4500ocn781681058OCoLC20130116200612.0120324s2012 nyua e 000 0 eng d016127134Uk9781455520589 (pbk.)1455520586(OCoLC)781681058BTCTAengBTCTABDXUKMGBOCOYDXCPSRCOQXZQPBWXOCORMNTGUtOrBLWNTGAPN1992.77.P676P67 2012791.4572 PORPortlandia :a guide for visitors /Fred Armisen, [et al.].New York, N.Y. :Grand Central Publishing,2012.192 p. :col. ill. 
;21 cm.Portlandia (Television program)Portland (Or.)Humor.Armisen, Fred,1966-Brownstein, Carrie.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01352cam a2200361Ia 4500ocn802295184OCoLC20130116205937.0120723s2012 nyua e 000 0 eng d 2012935560GBB273090bnb016131693Uk9781618930057 (hbk.)1618930052 (hbk.)(OCoLC)802295184UKMGBengUKMGBOCLCOBKXVP@NTGUtOrBLWNTGA796.8323796.83 SCHSchatz, Howard,1940-At the fights :inside the world of professional boxing /Howard Schatz ; executive producer/editor, Beverly J. Ornstein ; forward by Jim Lampley.New York :Sports Illustrated,c2012.240 p. :ill. (chiefly col.) ;37 cm.An award-winning photographer famed for his series of improvisational portraits of actors for Vanity Fair presents an oversized treasury of images that complement striking depictions of today's athletic stars with insightful sports commentary."Sports illustrated."Boxing.BoxingPictorial works.Ornstein, Beverly J.Sports illustrated.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01454cam a2200409Ia 4500ocn817945746OCoLC20130128214807.0121114s2012 nyu e 000 1 spa d80192897497804514188450451418840(OCoLC)817945746(OCoLC)801928974HT#HT#YDXCPBTCTABDXNTGUtOrBLWspaengNTGAPS3604.A9875863 DAY SPANISHDay, Sylvia.Bared to you.SpanishDesnuda ante ti /Sylvia Day.1a ed.New York :Berkley Books,2012.viii, 354 p. ;21 cm.Una novela de la serie crossfireIncludes a readers guide.In Spanish.Translation of: Bared to you.Era guapo y genial, irregular y ardiente. Me sentía atraída hacia él como a nada ni a nadie en toda mi vida. Ansiaba su contacto como una droga, aun sabiendo que me debilitaría. Yo era imperfecta, estaba dañada y él abría esas grietas mías con tanta facilidad.Man-woman relationshipsFiction.Erotic fiction.gsafdSpanish language editionFiction.localDay, Sylvia.Crossfire novel.Spanish.cme2013-01-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02315cam a2200433Ia 4500ocn794922014OCoLC20130614123751.0120316s2012 sp e 000 1 spa d77841797597884666473118466647317(OCoLC)794922014(OCoLC)778417975Spanish Pubs Llc, 8871 Sw 129 Ter, Miami, FL, USA, 33176, (305)2333365SAN 002-0400M$KM$KBTCTAYDXCPBDXTXGGPINTGUtOrBLWspaengNTGAPS813.5423863 KOS SPANISHKostigen, Thomas.Blood Line.SpanishLa línea de sangre /Thomas M. Kostigen ; traducción de Máximo González Lavarello.1a ed.Barcelona ;Miami :Ediciones B,2012.335 p. ;23 cm.La tramaText in Spanish.Translation of: Blood line.Michael Shea es un reconocido periodista de la BBC que trabaja como enviado especial en los lugares más conflictivos del planeta. En Oriente Próximo se entera de que tendrá lugar una reunión secreta entre el cabecilla de la milicia armada islamista y el secretario del ministro de Justicia israelí. Intenta infiltrarse y, tras ser descubierto, se ve forzado a huir. El gran secreto consiste en un tratado entre las dos partes por el cual se cede gas natural a los israelíes a cambio de dinero para armar a las milicias palestinas. En el negocio también está involucrada la mayor compañía de gas inglesa y su inversionista saudí más importante. Si la gente lo supiera, los resultados para los jefes serían desastrosos... Mientras tanto, los israelíes ponen una bomba en la sede de la BBC en Palestina. ¿Por qué de pronto Shea se convierte en el criminal más buscado del momento? 
¿Qué tiene que ver su propio tío en todo el entramado? -- Cover, p. [4]JournalistsFiction.TerrorismFiction.Spanish language editionFiction.localGonzález Lavarello, Máximo.Trama (Ediciones B (Firm))cme2013-01-29aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01198cam a2200397Ma 4500ocn786041029OCoLC20130614123751.0120409s2011 sp e 000 1 spa d7536350309788466645201 (pbk.)8466645209 (pbk.)(OCoLC)786041029(OCoLC)753635030CPGCPGBTCTAYDXCPBDXPL#I5BOCLCOTMENTGUtOrBLWNTGAPQ[Fic]863 MAD SPANISHMadrid, Juan,1947-Brigada central.3,El hombre del reloj /Juan Madrid.Hombre del relojBrigada central IIIEd. rev. por el autor, 1a. ed.Barcelona :Ediciones B,2011.453 p. ;23 cm.Brigada centralLa tramaPoliceSpainFiction.RomaniesSpainFiction.Spanish language editionFiction.localMadrid, Juan,1947-Serie Brigada central.Trama (Ediciones B (Firm))cme2013-01-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01711cam a2200409 a 4500ocn783166282OCoLC20130614123751.0120711s2012 nyu e 000 1 spa 201202619397803079513730307951375(OCoLC)783166282DLCengDLCBTCTABDXOCLCOYDXCPMLYCDXNTGUtOrBLWpcce-sp---NTGAPQ6663.A7218M36 2012863/.6423863 MAR SPANISHMarías, Javier.Mañana en la batalla piensa en mí /Javier Marías ; prólogo de Elide Pittarello.1a ed. Vintage Español.Nueva York :Vintage Español,2012.354 p. ;21 cm.Text in Spanish."A married woman, whose husband is in London, invites another man, whom she hardly knows, for dinner. In her house a two-year-old boy is sleeping. Finally, when the man approaches her after dinner, the woman is feeling ill. She dies before ever becoming his lover. What to do with the corpse, the boy, the absent husband? What is the difference between life and death? This is one of the most passionate and emotional novels of recent times and surely the most lauded"--Amazon.com.Married womenSpainMadridFiction.Television writersSpainMadridFiction.DeathFiction.SecrecyFiction.Madrid (Spain)Fiction.Spanish language editionFiction.localcme2013-01-28aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01916cam a2200433Ia 4500ocn806253786OCoLC20130614123751.0120815s2012 mx e 000 1 spa 8050512779786071119056 (pbk.)6071119057 (pbk.)(OCoLC)806253786(OCoLC)805051277LPULPUBTCTAYDXCPCPGCPLBDXNTGUtOrBLWspaitaNTGAPQ4873.A9532N4718 2012[Fic]863 MAZ SPANISHMazzantini, Margaret.Nessuno si salva da solo.SpanishNadie se salva solo /Margaret Mazzantini ; traducción de Carlos Gumpert.1a ed.Mexico, D.F. :Alfaguara,2012.218 p. ;24 cm.Text in Spanish; translated from the Italian.Translation of: Nessuno si salva da solo."Delia y Gaetano eran pareja. Ya no lo son, y han de aprender a asumirlo. Desean vivir tranquilos pero, al mismo tiempo, les inquieta y seduce lo desconocido. ¿En qué se equivocaron? No lo saben. La pasión del comienzo y la rabia del fi nal están todavía demasiado cercanas. En una época en la que parece que ya está todo dicho, sus palabras y silencios dejan al desnudo sus soledades, sus urgencias, sus recuerdos, y provocan brillos imprevistos al poner en escena, una noche de verano, el viaje del amor al desamor"--Cover p. 
[4].Husband and wifeFiction.Marital conflictFiction.Separation (Law)Fiction.Anorexia nervosaFiction.Italian fictionTranslations into Spanish.Spanish language editionFiction.localGumpert, Carlos.cme2013-01-29aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$00885cam a2200313 a 4500ocn738353144OCoLC20130116161237.0120201s2011 sp a e 000 1 spa 201241726797884991834428499183441(OCoLC)738353144DLCengDLCBTCTABDXPL#I5BNTGUtOrBLWpccNTGAPQ6719.A644E86 2011863 SAN SPANISHSantos, Jesús M.Esperanza /Jesús M. Santos.1. ed.Barcelona :Roca Editorial,2011.351 p. :ill. ;24 cm.Labrador Pérez, Esperanza,1922-2011Fiction.Spanish language editionFiction.localcsr2013-01-15aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02340cam a2200433 i 4500ocn798437876OCoLC20130129182513.0120621r20122011nyu e 000 1 spa 201202568579426366797801424269370142426938(OCoLC)798437876(OCoLC)794263667DLCengrdaDLCNVCOCLCOMLYJPLYDXCPBTCTANTGUtOrBLWpccNTGAPQ8180.43.R58M85 2012863/.723863 TRI SPANISHTriviño Argüello, Pilar.Las mujeres del Tío Sam :una novela /Pilar Triviño Argüello.New York, New York :C.A. Press, Penguin Group (USA),2012.vii, 280 pages ;23 cmtextrdacontentunmediatedrdamediavolumerdacarrierText in Spanish."Las mujeres del Tío Sam, una novela basada en hechos reales, narra las relaciones amorosas de mujeres colombianas de diferentes clases sociales, edades y procedencias que se involucraron con los norteamericanos que hacían parte de la ayuda de Estados Unidos al Plan Colombia. Valeria es una joven profesional que una noche cualquiera el azar cambia su vida y la de sus amigas. Se relacionan en un exclusivo y lujoso bar en Bogotá con militares del los EE. UU., que son hombres bellos, fortachones y, sobretodo, herméticos que no dejan pistas de sus actividades militares en Colombia. Sus atenciones las seducen y entran en un mundo desconocido que las llevan a vivir experiencias amorosas, eróticas, de prostitución, de esperanza y de decepción. Ellas están ilusionadas a alcanzar, a través del amor y la pasión, el "American Dream"--Cover p. [4].Man-woman relationshipsFiction.SoldiersUnited StatesFiction.SoldiersColombiaFiction.ColombiaFiction.Spanish language editionFiction.localReproduction of (manifestation):Triviño Argüello, Pilar.Las mujeres del Tío SamBogatá [Colombia] : Editorial Oveja Negra, 2011(DLC) 2011443710cme2013-01-29aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01767cam a22004574a 4500ocn779266072OCoLC20130116231652.0120813s2012 nyuaf e 000 0aeng 2012030594817577994978145169146714516914679781451691535 (ebook)145169153X (ebook)9781451691504 (pbk.)1451691505 (pbk.)(OCoLC)779266072(OCoLC)817577994DLCengDLCBTCTABDXYDXCPOCLCOIK2LIVFOLLTBWXTXBXLIG#UtOrBLWpccn-us---a-af---NTGADS371.4123.K67G58 2012958.104/742B23958.104742 GIUGiunta, Salvatore A.(Salvatore Augustine),1985-Living with honor /Salvatore A. Giunta ; with Joe Layden.1st Threshold Editions hardcover ed.New York :Threshold Editions,2012.294 p., [8] p. of plates :ill. 
;24 cm.A Medal of Honor recipient shares the story of his military career, recounting his deployment to Afghanistan's volatile Korengal Valley and his life-risking efforts to provide medical aid to wounded fellow soldiers during an October 2007 Taliban ambush.Giunta, Salvatore A.(Salvatore Augustine),1985-Afghan War, 2001-CampaignsAfghanistanKorangal Valley.Afghan War, 2001-Personal narratives, American.Medal of HonorBiography.SoldiersUnited StatesBiography.Layden, Joe.djn 2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02176cam a2200385M 4500ocn809615904OCoLC20130116184544.0120910s2012 xx e 000 0 eng d14555228219781455522828(OCoLC)809615904YDXCPYDXCPUUCOCLCOUtOrBLWNTGA629.45009223B HERNANDEZ SPANISHHernández, José M.,1962-Alcanzar las estrellas :la historia inspiradora de un trabajador migrante que se convirtió en astronauta /José M. Hernández [and Monica Rojas Rubin ; prólogo por Emilio Estefan].Cosechador de estrellasTitle in English:Reaching for the stars :the inspiring story of a migrant farmworker turned astronaut1a ed.[New York] :Grand Central Pub.,2012.xvi, 282 p., [16] p. of plates :col. ill. ;24 cm.Title on cover: El cosechador de estrellas.Born into a family of migrant workers, toiling in the fields by the age of six, Jose M. Hernández dreamed of traveling through the night skies on a rocket ship. [The book] is the inspiring story of how he realized that dream, becoming the first Mexican-American astronaut. Hernández didn't speak English till he was 12, and his peers often joined gangs, or skipped school. And yet, by his twenties he was part of an elite team helping develop technology for the early detection of breast cancer. He was turned down by NASA eleven times on his long journey to donning that famous orange space suit. Hernández message of hard work, education, perseverance, of "reaching for the stars," makes this a classic American autobiography.Hernández, José M.,1962-AstronautsUnited StatesBiography.Migrant agricultural laborersCaliforniaBiography.Spanish language editionNonfiction.localRubin, Monica Rojas.Estefan, Emilio.lmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01236cam a2200373 a 4500ocn779265891OCoLC20130116215440.0120409s2013 nyu e 000 0aeng 20120113249781451675368 (hbk.)1451675364 (hbk.)9781451675382 (ebk.)1451675380 (ebk.)(OCoLC)779265891DLCengDLCIG#BTCTABDXOCLCOGK5YDXCPJP3VP@NTGUtOrBLWn-us---NTGAPN2287.L28555A3 2013791.4302/8092B23B LAWLESSLawless, Wendy.Chanel bonfire :a memoir /Wendy Lawless.1st Gallery Books hardcover ed.New York :Gallery Books,2013.295 p. ;22 cm."Wendy Lawless's ... memoir of resilience in the face of an unstable alcoholic and suicidal mother"--Provided by publisher.Lawless, Wendy.Motion picture actors and actressesUnited StatesBiography.ActressesUnited StatesBiography.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02375cam a2200421 a 4500ocn788281963OCoLC20130116223211.0120924s2013 kyua e bq s001 0beng 20120346429780813136905 (hardcover : alk. paper)0813136903 (hardcover : alk. paper)9780813136912 (pdf)0813136911 (pdf)9780813140384 (epub)0813140382 (epub)40021728439(OCoLC)788281963DLCengDLCYDXBTCTABDXOCLCOYDXCPOCLCOYUSNTGUtOrBLWpccNTGAPN2287.M83A55 2013791.43/028/092B23B MURRAYAnkerich, Michael G.,1962-Mae Murray :the girl with the bee-stung lips /Michael G. 
Ankerich ; foreword by Kevin Brownlow.Lexington, Ky. :University Press of Kentucky,c2013.xi, 376 p. :ill. ;24 cm.Screen classicsIncludes bibliographical references, filmography and index.Untangling Mae Murray's tangled beginnings, 1885-1899 --Dancing into the new century, 1900-1907 --Ziegfeld and the millionaire, 1908-1911 --Life is a cabaret, 1912-1914 --From footlights to kliegs, 1915 --The disillusions of a dream girl, 1916 --Ready for my close-ups, Mr. Lasky!, 1917 --The delicious little Mae, 1918-1919 --On with the dance, 1920 --Strutting like a peacock through Tiffany's, 1921-1922 --Mae the enchantress, 1923-1924 --the merry widow and the dirty Hun, May 1924 March 1925 --From merry widow to gay divorcée, 1925 --Princess Mdivani, 1926 --The lion's roar, the baby's cry, 1927 --A world of cheap imitations, 1928 --The sound of bee-stung lips, 1929-1931 --Oh, brother!, 1932 --From a prince to a toad, 1933 --Losing Koran, 1934-1940 --Outliving fame, 1941-1957 --Self-enchantment, 1958 --1960 --A star in twilight, 1961-1965 --Epilogue --Acknowledgments --Professional theater --Filmography.Murray, Mae,1885-1965.Motion picture actors and actressesUnited StatesBiography.Screen classics (Lexington, Ky.)JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01446cam a2200409Ia 4500ocn792890096OCoLC20130614123751.0120429s2012 sp a e 000 0aspa c97884663257078466325700(OCoLC)792890096BTCTAengBTCTAYDXCPBDXGPISJPUtOrBLWspaengNTGA813.5423B OATES SPANISHOates, Joyce Carol,1938-Widow's story.SpanishMemorias de una viuda /Joyce Carol Oates ; traducción de María Luisa Rodríguez Tapia.1a ed.Madrid :Punto De Lectura,2012.[471] p. :ill. ;19 cm.Original title: Widow's story.Joyce Carol Oates shares her struggle to comprehend a life absent of the partnership that had sustained and defined her for nearly half a century.Oates, Joyce Carol,1938-Family.Smith, Raymond J.Death and burial.WidowsUnited StatesBiography.Loss (Psychology)Bereavement.Authors, American20th centuryFamily relationships.Spanish language editionNonfiction.localRodríguez Tapia, María Luisa.lmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01094nam a2200301Ka 4500ocn824509599OCoLC20130116182040.0130116s2009 wauac e 000 0aeng d9780974256832 (pbk.)0974256838 (pbk.)(OCoLC)824509599NTGNTGNTGUtOrBLWNTGA979.777B23B THOMSENThomsen, Ralph Einar.Memories of Richmond Beach :Ralph Einar Thomsen's recollections of small-town life on Puget Sound in the 1920s - 1940s /[by Ralph E. Thomsen].Ralph Einar Thomsen's recollections of small-town life on Puget Sound in the 1920s - 1940sSeattle, Wash. :Ralph E. Thomsen ;Shoreline, Wash. :Shoreline Historical Museum,c2009.viii, 72 p. :ill., maps ;23 cm.Thomsen, Ralph Einar.Richmond Beach (Wash.)Biography.Richmond Beach (Wash.)History.slh2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01928cam a2200481Ia 4500ocn679931885OCoLC20130614123751.0101107s2011 nyua a 000 1 spa d9781442422926 (boardbook)1442422920 (boardbook)(OCoLC)679931885BTCTAengBTCTAIEPJTHIF9BDXNCLNTGUtOrBLWspaengNTGAPZ74.3.M33483 2011[E]23E MARTIN SPANISHMartin, Bill,1916-2004.Chicka chicka ABC.SpanishChica chica bum bum ABC /por Bill Martin Jr. y John Archambault ; ilustrado por Lois Ehlert ; [traducción de Alexis Romay].Chica chica bum bum A B C1a ed. 
en lengua española.Nueva York :Simon & Schuster Libros Para Niños,2011.1 v. (unpaged) :col. ill. ;20 x 14 cm.Text in Spanish.Texto en español.Translation of: Chicka chicka ABC.On board pages."Publicado originalmente en inglés en 1993 con el título Chicka chicka ABC por Little Simon"--P. 4 of cover."A le dijo a B, y B le dijo a C: "En el cocotero, ¡allí de esperare!" Pero cuando todas las letras del alfabeto suben al cocotero... ¡Oh, no¡ Chica chica ¡BUM! ¡Bum!An alphabet rhyme/chant that relates what happens when the whole alphabet tries to climb a coconut tree.English languageAlphabetJuvenile fiction.Alphabet rhymes.Stories in rhyme.Spanish language editionJuvenileFiction.localArchambault, John.Ehlert, Lois.Romay, Alexis.cme2013-01-29aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01199cam a2200361 a 4500ocm26553002 OCoLC20130614123751.0920804s1994 nyua a 000 1 eng 92029020 3450661222939385503992244599780399224454(OCoLC)26553002(OCoLC)34506612(OCoLC)229393855DLCengDLCVPWAZUOCLBAKERBTCTAYDXCPUPZMR0CS1CQUIG#TAMSABDXOCLCQOCLCONTGUtOrBLWlcacNTGAPZ7.R1936Go 1994PS3568.A7197G66 1994[E]20E RATHMANNRathmann, Peggy.Good night, Gorilla /Peggy Rathmann.New York :Putnam,c1994.1 v. (unpaged) :col. ill. ;19 x 22 cm.An unobservant zookeeper is followed home by all the animals he thinks he has left behind in the zoo.Zoo animalsJuvenile fiction.ZoosJuvenile fiction.Picture books for children.gsafdsc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01450cam a2200421Ia 4500ocm44114877 OCoLC20130614123751.0000524s1981 nyua a 000 0 eng d 85042990 81043037 767580702006445002397800644500279780808529941 (lib. bdg.)0808529943 (lib. bdg.)0690041225 (Cover)0690041233(OCoLC)44114877(OCoLC)767580702CLOCLOOCLOCLCGOTPIG#BDXNTGUtOrBLWNTGAQB801.7.B73 1981523.819E523.8 BRABranley, Franklyn Mansfield,1915-2002.The sky is full of stars /by Franklyn M. Branley ; illustrated by Felicia Bond.New York, NY :HarperCollins,c1981.34 p. :ill. (some col.) ;21 x 22 cm.Let's-read-and-find-out science. Stage 2Explains how to view stars and ways to locate star pictures, known as constellations, throughout the year."HarperTrophy."--Pg. [4] of cover.StarsJuvenile literature.ConstellationsJuvenile literature.Bond, Felicia,ill.Let's-read-and-find-out science.Stage 2.bp2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01217cam a2200385Ia 4500ocn740627341OCoLC20130614123751.0110706s2012 nyu e 000 1 eng d016009425Uk97803125450620312545061(OCoLC)740627341BTCTAengBTCTAUKMGBBDXOCOJRSEINCPNTGUtOrBLWNTGAPR9199.4.B364B33 2012FIC BARANTBarant, D. D.Back from the undead /D.D. Barant.St. Martin's pbk. ed.New York, N.Y. :St. Martins Paperbacks,2012.330 p. ;18 cm.The bloodhound files ;bk. 05Jace faces a danger like no other as she seeks justice for a group of condemned children.VampiresFiction.Criminal profilersFiction.Child traffickingFiction.Vancouver (B.C.)Fiction.Mystery fiction.gsafdFantasy fiction.Barant, D. D.Bloodhound files ;bk. 
05.AMW 20120409aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01481cam a2200421 a 4500ocn768728884OCoLC20130116211117.0111216s2013 nyu e 000 1 eng 20110480009781451666342 (pbk.)1451666349 (pbk.)9781451666410 (ebk.)1451666411 (ebk.)(OCoLC)768728884DLCengDLCIG#OCLCOBTCTABDXGL4YDXCPCGPOCLCOABGNTGUtOrBLWNTGAPS3606.R36535T48 2013813/.623FIC FRANCISFrancis, Wendy.Three good things /Wendy Francis.3 good things1st Simon & Schuster trade pbk. ed.New York :Simon & Schuster Paperbacks,2013.235 p. ;21 cm."A novel"--Cover.The story of two sisters, one of whom opens a bake shop that features the traditional Danish pastry called kringle, the other of whom is a successful attorney with a baby at home, both of whom long for the guidance of their deceased mother.Includes a reading group guide.SistersFiction.Divorced womenFiction.New mothersFiction.Mothers and daughtersFiction.BakeriesFiction.edl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01980cam a2200421 a 4500ocn778422664OCoLC20130116194608.0120712s2012 nyub e 000 f eng 2012028307016142751Uk9780312607074 (hardcover)0312607075 (hardcover)9781466802506 (e-book)1466802502 (e-book)(OCoLC)778422664DLCengDLCYDXBTCTABDXYDXCPUKMGBOCLCOIUKJP3LEBBWXNTGUtOrBLWpccn-us-vaNTGAPS3557.I4945V53 2012813/.5423FIC GINGRICHGingrich, Newt.Victory at Yorktown /Newt Gingrich, William R. Forstchen, and Albert S. Hanser, contributing editor.1st ed.New York :Thomas Dunne Books,c2012.xiv, 345 p. :maps ;25 cm.General Washington, seeking to end a three-year stalemate, embarks on a secret three-hundred-mile forced march of his entire army to meet the French navy's Chesapeake Bay blockade and capture Cornwallis's entire force. Sgt. Peter Wellsley must pave the way for the army, neutralizing any loyalists who might provide warning. On the other side, Allen Van Dorn receives reports from civilians that something is afoot and is tasked to find out what. As Wellsley moves to block any leaks, Van Dorn tries to penetrate the screen. When one of the former friends is captured, both must decide where their true loyalties lie during the heat of the Battle of Yorktown.Yorktown (Va.)HistorySiege, 1781Fiction.United StatesHistoryRevolution, 1775-1783Fiction.Historical fiction.Forstchen, William R.autHanser, Albert S.edtslh 20121203aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01922cam a2200397 a 4500ocn783162692OCoLC20130116215011.0120925s2013 nyu e 000 1 eng 2012036180016229234Uk9780393338621 (pbk.)0393338622 (pbk.)(OCoLC)783162692DLCengDLCIG#BTCTABDXOCLCOUKMGBIH9YDXCPCOOBWXOCPNTGUtOrBLWa-ii---NTGAPR9499.4.J676I45 2013823/.9223FIC JOSEPHJoseph, Manu.The illicit happiness of other people /Manu Joseph.1st American ed.New York :W. W. Norton & Co.,2013.344 p. ;21 cm.Seventeen-year-old Unni has done something terrible. The only clue to his actions lies in a comic strip he has drawn, which has fallen into the hands of his father Ousep - a nocturnal anarchist with a wife who is fantasizing about his early death. Ousep begins investigating the extraordinary life of his son, but as he circles closer and closer to the truth, he unravels a secret that shakes his family to the core. 
Set in Madras in the 1990s, where every adolescent male is preparing for the toughest exam in the world, this is a powerful and darkly comic story involving an alcoholic's probe into the minds of the sober, an adolescent cartoonist's dangerous interpretation of absolute truth, an inner circle of talented schizophrenics and the pure love of a 12-year-old boy for a beautiful girl.FamiliesIndiaFiction.Chennai (India)Fiction.Black humor.gsafdLove stories.gsafdBlack humor.Love stories.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01431cam a2200433 i 4500ocn783161163OCoLC20130614123752.0120523s2012 ilu e 000 f eng 20120208209781564787859 (cloth : alk. paper)1564787850 (cloth : alk. paper)9781564788139 (pbk. : alk. paper)156478813X (pbk. : alk. paper)(OCoLC)783161163DLCengDLCYDXBTCTABDXYDXCPOCLCOIADORXCDXGZMVP@NTGUtOrBLWengnorpccNTGAPT8951.29.A39S4513 2012839.82/37423FIC SAETERBAKKENSæterbakken, Stig,1966-2012.Selvbeherskelse.EnglishSelf-control /Stig Sæterbakken ; translated by Seán Kinsella.Champaign :Dalkey Archive Press,2012.154 p. ;21 cm.Norwegian literature series"Originally published in Norwegian as Selvbeherskelse by J.W. Cappelen, Oslo, 1998."ParalyticsFiction.FamiliesFiction.Married peopleFiction.HusbandsFiction.Kinsella, Seán.Norwegian literature series.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01769cam a2200337Ia 4500ocn823170908OCoLC20130116194503.0130102s2013 cau e 000 1 eng d7957596119781597804455 (pbk.)1597804452 (pbk.)(OCoLC)823170908(OCoLC)795759611IMRIMRBTCTABDXOCLCONTGUtOrBLWNTGAFIC TEPPOTeppo, Mark.Earth thirst /Mark Teppo.First ed.San Francisco, CA :Night Shade Books,2013.277 p. ;25 cm."Vampires are our last hope" -- front cover."Humanity -- over-breeding, over-consuming -- is destroying the very planet they call home. Multi-national corporations despoil the environment, market genetically-modified crops to control the food supply, and use their wealth, influence and private armies to crush anything, and anyone, that gets in the way of their profits. Nothing human can stop them. But something unhuman might. Once they did not fear the sun. Once they could breathe the air and sleep where they chose. But now they can rest only within the uncontaminated soil of Mother Earth - and the time has come for them to fight back against the ruthless corporations that threaten their immortal existence. They are the last guardians of paradise, more than human but less than angels. They call themselves the Arcadians. We know them as vampires..." -- back cover.VampiresFiction.OverpopulationFiction.PollutionFiction.Night Shade Books.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01644cam a2200445 a 4500ocm57142725 OCoLC20130116212244.0041117t20052001nyu e 000 1 eng 200406364956780652623825617219073039781585674824158567482697815856770921585677094(OCoLC)57142725(OCoLC)56780652(OCoLC)62382561(OCoLC)721907303DLCengDLCIG#WIQSRBBAKERBTCTAYDXCPGO3YW6ABGYVROCLCQBDXUtOrBLWe-uk-enNTGAPR6072.I525S66 2005823/.91422FIC VINCENZIVincenzi, Penny.Something dangerous /Penny Vincenzi.Woodstock, NY :Overlook Press,2005, c2001.710 p. 
;23 cm.Born into the powerful Lytton family, twins Adele and Venetia Lytton grow up in a golden world, until the dark specter of Nazi Germany begins to loom over Europe and the Lytton family faces challenges for which they are unprepared.Lytton family (Fictitious characters)Fiction.Publishers and publishingFiction.SistersFiction.TwinsFiction.EnglandFiction.Great BritainHistoryGeorge V, 1910-1936Fiction.Great BritainHistoryGeorge VI, 1936-1952Fiction.Domestic fiction.Love stories.gsafdJL2012-05-18aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01895cam a2200409 a 4500ocn779266197OCoLC20130116182912.0120914s2013 nyub e b 000 1 eng 20120349879781451674705 (pbk.)1451674708 (pbk.)(OCoLC)779266197DLCengDLCIG#BTCTABDXYDXCPOCLCOIK2VP@BWXNTGUtOrBLWn-us-nyn-us-njNTGAPS3623.A336S48 2013813/.623FIC WADEWade, Christine.Seven locks :a novel /Christine Wade.7 locks1st Atria Paperback ed.New York :Atria Paperback,2013.vi, 329 p. :map ;21 cm."The Hudson River Valley, 1769. A man mysteriously disappears without a trace, abandoning his wife and children on their farm at the foot of the Catskill Mountains. At first many believe that his wife, who has the reputation of being a scold, has driven her husband away, but as the strange circumstances of his disappearance circulate, a darker story unfolds. And as the lines between myth and reality fade in the wilderness, and an American nation struggles to emerge, the lost man's wife embarks on a desperate journey to find the means to ensure her family's survival"--P. [4] of cover.Includes a reading group guide.Includes bibliographical references (p. 327).FamiliesNew York (State)Fiction.Farm lifeNew York (State)Fiction.Hudson River Valley (N.Y. and N.J.)History18th centuryFiction.Catskill Mountains Region (N.Y.)Fiction.Historical fiction.gsafdedl2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01669cam a2200409Ia 4500ocn761854367OCoLC20130614123752.0111120s2011 sp a c 000 1 spa d9788479429072 (tapa dura)8479429070 (tapa dura)(OCoLC)761854367BTCTAengBTCTABDXDWPJTHNTGUtOrBLWspadutNTGAJ JAQUET SPANISHJaquet, Gertie.Het snoepprinsesje.SpanishQué princesa tan golosa! /Gertie Jaquet ; [traducción del neerlandés, Nadine Beliën].Madrid :Macmillan Iberia,c2011.37 p. :ill. ;22 cm.LibrosaurioText in Spanish.Translation of: Het snoepprinsesje."Malena es una princesa que se pasa el día comiendo regaliz, caramelos y galletas de chocolate. Y al final, se encuentra tan inflada como un globo y toda desdichada. Los Reyes están alarmados: ¿ćomo podrán hacer que su hija sea feliz? La inesperada llegada de un músico, hará que su vida cambie a ritmo de rap"--Cover p. 
[4].Obesity in childrenJuvenile fiction.ConfectioneryJuvenile fiction.PrincessesJuvenile fiction.Rap (Music)Juvenile fiction.Children's stories, DutchTranslations into Spanish.Spanish language editionJuvenileFiction.localBëlien, Nadine.Librosaurio.cme2013-01-24aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01833cam a2200445Ia 4500ocn747534402OCoLC20130614123752.0110821s2012 sp a c 000 1 spa d97884152352178415235216(OCoLC)747534402BTCTAengBTCTABDXOQXIMDTXWHSNTGUtOrBLWspaitaNTGA[Fic]23J PIRATESCHL SPANISH V.7Stevenson, Steve.Diario di Capitan Barracuda.SpanishEl diario del Capitán Barracuda /Steve Stevenson ; ilustraciones de Stefano Turconi ; [traducción, Julia Osuna Aguilar].At head of title: Laescuela de piratas1a. ed.Barcelona :Pirueta,2011.82 p. :col. ill. ;21 cm.Escuela de Piratas ;7Text in Spanish.Ha comenzado el segundo curso en la Escuela de Piratas y los Lobitos de Mar, para variar, arman una de las suyas: dejan que se les escape delante de sus narices la tripulación más cafre, la de los novatos. ¿Conseguirán encontrarla? ¿Y qué ocurrirá cuando, en su búsqueda, se tropiecen con un pirata..., o, mejor dicho, con su esqueleto?PiratesJuvenile fiction.Treasure trovesJuvenile fiction.IslandsJuvenile fiction.Buried treasureFiction.Adventure and adventurersFiction.Spanish language editionJuvenileFiction.localTurconi, Stefano,1974-ill.Osuna Aguilar, Julia.Stevenson, Steve.Scuola dei Pirati.Spanish ;7.bp2013-01-15aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01873cam a2200361Ia 4500ocn793220853OCoLC20130116151516.0120501s2012 flua c 000 0bspa d9781614353416 (pbk.)1614353417 (pbk.)(OCoLC)793220853BTCTAengBTCTABDXIFANYPGO3CLEUtOrBLWNTGAPQ8097.N4Z65 2012JB NERUDA SPANISHLázaro León, Georgina.Conoce a Pablo Neruda /Georgina Lázaro León ; ilustraciones de Valeria Cis.Pablo NerudaDoral, Fla. :Alfaguara,c2012.27 p. :col. ill. ;26 cm.Personajes del mundo hispánico"Pablo Neruda fue senador, cónsul, embajador, académico de la lengua y uno de los poetas más reconocidos de la literatura universal. Y a pesar de ser un señor tan importante, conservaba su alma de niño. Coleccionaba objetos y libros como si fueran juguetes, se pintaba bigotes con corcho quemado, le encantaban los pájaros y, en una ocasión, domesticó una mangosta. ¿Quieres conocer a Pablo Neruda? ¡Abre este libro y empieza a leer!"--P. [4] of cover.Pablo Neruda was many things in his life, but is best known for being a great poet. Yet he never lost his ability to be childlike and had a wonderful sense of humor. Want to get to know Pablo Neruda? 
Open this book and start reading!Neruda, Pablo,1904-1973Juvenile literature.Poets, Chilean20th centuryBiographyJuvenile literature.Spanish language editionJuvenileNonfiction.localCis, Valeria.Personajes del mundo hispánico.lmc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01563cam a2200433Ii 4500ocn820835985OCoLC20130614123752.0121211s2013 nyu e 000 1 eng d78828874397803079857670307985768(OCoLC)820835985(OCoLC)788288743UPZengrdaUPZBTCTABDXYDXCPIZ2MR0VP@NTGUtOrBLWn-us-meNTGAPS3619.H5426S78 2013813/.623M SHIELDSShields, Kieran,author.A study in revenge :[a novel] /Kieran Shields.First edition.New York :Crown Publishers,[2013]372 pages ;24 cmtextrdacontentunmediatedrdamediavolumerdacarrierA sequel to The Truth of All Things finds late-19th century police detective Archie Lean and his half-Native American partner, Perceval Grey, investigating the theft of a recently buried body and the staging of a bizarre occult scene that is linked to a centuries-old magical relic.PoliceMainePortlandFiction.Occultism and criminal investigationFiction.Grave robbingFiction.RevengeFiction.Portland (Me.)Fiction.Historical fiction.gsafdMystery fiction.gsafdJL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01944cam a2200361Ia 4500ocn788241518OCoLC20130116192010.0120418t20132012nyu e 000 1 eng d9780451238931 (pbk.)0451238931 (pbk.)(OCoLC)788241518BTCTAengBTCTABDXJQWZS3CO2IEPNTGUtOrBLWNTGAPS3568.O843485S43 2012813.5423ROM ROSSRoss, JoAnn.Sea glass winter /JoAnn Ross.New York :Signet,[2013], c2012.388 p. ;18 cm.A Shelter Bay novel"As an Explosive Ordnance Disposal Specialist, Dillon Slater had one of the most dangerous jobs in the military. Now, he's enjoying the pace of life in Shelter Bay, where he teaches high school physics. He still gets to blow things up, but as the school basketball coach he also gets to impart leadership skills. His latest minefield: fifteen-year-old Matt Templeton; and Matt's irresistible mother. Claire Templeton moved her troubled teenage son to the small town of Shelter Bay to escape the bad influences at his school in L.A. But when his attitude earns her a visit from the handsome basketball coach, she wonders if this role model might be too much of a temptation--for her. Because though she isn't looking for a relationship, she can't seem to resist Dillon's playful charm. But what she doesn't realize is that Dillon isn't playing games; he's playing for keeps."-- P. [4] of cover.Fatherless familiesFiction.Problem youthFiction.Teacher-counselor relationshipsFiction.Man-woman relationshipsFiction.Love stories.gsafdRoss, JoAnn.Shelter Bay novel.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01747cam a2200373Ia 4500ocn779266065OCoLC20130116191416.0120308s2013 nyu e 000 1 eng d97814516715991451671598(OCoLC)779266065BTCTAengBTCTABDXFOLLTJO3YW6IZ8NTGUtOrBLWNTGA813/.623ROM SHOWALTERShowalter, Gena.Last kiss goodnight /Gena Showalter.1st Pocket Books pbk. ed.New York :Pocket Books,2013.436 p. ;18 cm.Otherworld assassin"Pocket Books paranormal romance"--Spine.Black ops agent Solomon Judah awakens caged and bound in a twisted zoo where otherworlders are the main attraction.Vika Lukas, the owner's daughter, is tasked with Solo's care and feeding. The monster inside him yearns to kill her on sight, even though she holds the key to his escape. 
But the human side of him realizes the beautiful deaf girl is more than she seems--she's his. Vika endures the captives' taunts and loathing, hoping to keep them alive even if she can't free them. Only, Solo is different-- he protects her. But as hostility turns to forbidden romance, his feelings for her will be used against him ... and he'll be put to a killer test.AssassinsFiction.Man-woman relationshipsFiction.Deaf womenFiction.Paranormal romance stories.Occult fiction.gsafdShowalter, Gena.Otherworld assassin.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01829cam a2200517 a 4500ocn781939269OCoLC20130614123752.0120327s2013 nyu d 000 1 eng 20120061459780385742061 (hbk.)0385742061 (hbk.)9780375990397 (lib. bdg.)0375990399 (lib. bdg.)9780375979972 (ebk.)0375979972 (ebk.)(OCoLC)781939269DLCengDLCIG#BTCTABDXYDXCPSINLBJAGOCLCOIK2NTGUtOrBLWn-us-nyNTGAPZ7.C7834Jan 2013[Fic]23Y COONEYCooney, Caroline B.Janie face to face /Caroline B. Cooney.1st ed.New York :Delacorte Press,c2013.345 p. ;22 cm.At college in New York City, Janie Johnson, aka Jennie Spring, seems to have successfully left behind her past as "The face on the milk carton," but soon she, her families, and friends are pursued by a true-crime writer who wants their help in telling her kidnapper's tale.KidnappingJuvenile fiction.AuthorshipJuvenile fiction.College stories.Man-woman relationshipJuvenile fiction.Identity (Psychology)Juvenile fiction.FamiliesJuvenile fiction.New York (N.Y.)Juvenile fiction.Universities and collegesFiction.LoveFiction.IdentityFiction.Family lifeFiction.College stories.Love stories.gsafdsc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01667cam a22004938a 4500ocn793503410OCoLC20130614123752.0120501s2013 nyu d 000 1 eng 201201299397814424439901442443995(OCoLC)793503410DLCengDLCBTCTABDXVHPOCLCOILCUPZNTGUtOrBLWlcacpccNTGAPZ7.S3818Fal 2013[Fic]23Y SCHROEDERSchroeder, Lisa.Falling for you /Lisa Schroeder.1st Simon Pulse hardcover ed.New York :Simon Pulse,2013.355 p. ;22 cm.Very good friends, her poetry notebooks, and a mysterious "ninja of nice" give seventeen-year-old Rae the strength to face her mother's neglect, her stepfather's increasing abuse, and a new boyfriend's obsessiveness.Family problemsFiction.Dating (Social customs)Juvenile fiction.High schoolsJuvenile fiction.SchoolsFiction.PoetryJuvenile fiction.StepfathersJuvenile fiction.FloristsJuvenile fiction.Family problemsFiction.Dating (Social customs)Fiction.High schoolsFiction.SchoolsFiction.PoetryFiction.StepfathersFiction.FloristsFiction.Young adult fiction.JL2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01107cam a2200349 a 4500ocn794037285OCoLC20130614123752.0120918s2013 nyu d 000 1 eng 20120316289781595145994 (hbk.)1595145990 (hbk.)(OCoLC)794037285DLCengDLCIG#BTCTABDXCO2OCLCOIK2NTGUtOrBLWNTGAPZ7.Y89592Pap 2013[Fic]23Y YOVANOFFYovanoff, Brenna.Paper valentine /Brenna Yovanoff.New York :Razorbill,c2013.304 p. 
;22 cm.Followed everywhere by the ghost of her recently deceased best friend, Hannah investigates the serial murders of young girls in her community.GriefJuvenile fiction.Ghost stories.Serial murderersJuvenile fiction.GhostsFiction.Ghost stories.gsafdsc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01297cam a2200385 a 4500ocn809789695OCoLC20130614123752.0120910s2012 ilua d 000 0ceng 20120340449781608461561 (hbk.)1608461564 (hbk.)(OCoLC)809789695DLCengDLCIG#YDXCPNTGUtOrBLWn-us---NTGACT217.C44 2012303.48/40922B23Y303.484092 ONE101 changemakers :rebels and radicals who changed US history /edited by Michele Bollinger and Dao X. Tran.One hundred one changemakersHundred one changemakersChicago, IL :Haymarket Books,2012.ix, 215 p. :ill. ;29 cm.Social reformersUnited StatesBiographyJuvenile literature.Social movementsUnited StatesJuvenile literature.United StatesBiographyJuvenile literature.ReformersUnited StatesBiography.United StatesJuvenile biography.Bollinger, Michele.Tran, Dao X.AMW2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01239cam a2200337Ka 4500ocn760977475OCoLC20130116193926.0111112s2012 nyua e 000 0 eng d9781609787110 (pbk.)1609787110 (pbk.)(OCoLC)760977475BTCTAengBTCTAYDXCPBDXDPLNTGUtOrBLWNTGAY570.76 STA 2013/2014Stabler, Linda Brooke.AP biology 2013-2014 /Linda Brooke Stabler, Mark Metz, Paul Gier.Advanced placement biology 2013-2014Kaplan AP biology 2013-2014New York :Kaplan,c2012.xi, 317 p. :ill. ;; 28 cm."[Includes] two full-length practice tests, detailed answer explanations, score-raising strategies and tips, diagnostic test, end-of-chapter quizzes"--Cover.BiologyExaminationsStudy guides.Advanced placement programs (Education)ExaminationsStudy guides.College entrance achievement testsStudy guides.Metz, Mark.Gier, Paul.csr2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$02804cgm a2200613Ia 4500ocm51844300 OCoLC20130116205432.0vd mvaizu030312t20051997vtu072 e vleng d15780797569781578079759783421366893WG36689WGBH Boston Video(OCoLC)51844300TNHengTNHJBWOCLCQRCSOCLCQCYCBTCTARCSUMCOCLCQYDXCPSTFAKUXY4SBMBLPWAUOCLCAYVROCLCQUtOrBLWengengn-us---NTGAHV4504.R53 2003385/.097322DVD 385.0973 RIDRiding the rails[videorecording] /directed, written, & produced by Michael Uys and Lexy Lovell ; the American History Project ; Out of the Blue Productions, Inc.United States.[S. Burlington, Vt.] :WGBH Boston Video,[2005], c1997.1 videodisc (ca. 72 min.) :sd., col. 
with b&w sequences ;4 3/4 in.DVD; Dolby digital; aspect ratio: 4x3 full screen.In English; closed-captioned.Cinematography, Samuel Henriques ; editor, Howard Sharp ; original music, Jay Sherman-Godfrey.Originally produced as a documentary film in 1997.Companion to the book: Riding the rails, teenagers on the move during the Great Depression / by Errol Lincoln Uys.An edited version of "Riding the rails" was produced as an episode of the PBS documentary television program American experience in 2003.Not rated."Tells the unforgettable story of the 250,000 teenagers who left their homes and hopped freight trains during the Great Depression"--Container.Special features: interview with Uys & Lovell (5 min.); slide show of Depression-era photos (3 min., 28 images); excerpt from the companion book; weblinks.Depressions1929United States.TeenagersUnited States.United StatesEconomic conditions1918-1945.United StatesHistory1919-1933.RailroadsUnited StatesHistory.TrampsUnited StatesHistory.Historical films.lcgftDocumentary films.lcgftVideo recordings for the hearing impaired.lcgftUys, Michael.prodrtausLovell, Lexy.ausprodrtAmerican History Project.Out of the Blue Entertainment (Firm)WGBH Video (Firm)American experience (Television program)cme2013-01-16hC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01493cas a2200445 i 4500ocn823177198OCoLC20130116071520.0130102c20139999mduar s 0 a0eng c 2013234400(OCoLC)823177198IHVrdaIHVGZLHVLINUIULNTGUtOrBLWpccn-us---NTGAHA202.P76317.323R310 PROProQuest statistical abstract of the United States.Statistical abstract of the United StatesNational data bookLanham, Maryland :Bernan,2012-volumes :maps ;29 cmAnnualtextrdacontentunmediatedrdamediavolumerdacarrier2013-Volume for 2013 called also "1st ed." in suggested citation.Content edited by ProQuest.Description based on: 2013; title from title page.Latest issue consulted: 2013.United StatesStatisticsPeriodicals.ProQuest (Firm)Online version:ProQuest statistical abstract of the U.S.[Ann Arbor, Mich.] : ProQuest, [2012]-(OCoLC)823161068Statistical abstract of the United States0081-4741(DLC) 04018089(OCoLC)1193890EP 20130116aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01853cam a2200397 a 4500ocn693809384OCoLC20130116202350.0111026s2012 cau e b 001 0 eng 2011044358015947615Uk7571485869781608717439 (pbk. : alk. paper)1608717437 (pbk. : alk. paper)(OCoLC)693809384(OCoLC)757148586DLCengDLCYDXBTCTAYDXCPZMMUKMGBBWXBDXCOOCLUUCXCDXNTGUtOrBLWpccn-us---NTGAKF8742.S914 2012347.73/2623R347.7326 SUP 2012The Supreme Court compendium :data, decisions & developments /Lee Epstein ... [et al.].5th ed.Thousand Oaks, Calif. :CQ Press,c2012.xxviii, 836 p. ;24 cm.Includes bibliographical references (p. 
809-816) and index.The Supreme Court : an institutional perspective --The Supreme Court's review process, caseload, and cases --The Supreme Court's opinion, decision, and outcome trends --The justices : backgrounds, nominations, and confirmations --The justices : post-confirmation activities and departures from the Court --The justices : oral arguments, votes, and opinions --The Supreme Court : its political and legal environments --The Supreme Court and public opinion --The impact of the Supreme Court.United States.Supreme CourtOutlines, syllabi, etc.Constitutional lawUnited StatesOutlines, syllabi, etc.Judicial reviewUnited StatesOutlines, syllabi, etc.Epstein, Lee,1958-sc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01935cam a2200409 a 4500ocn767824989OCoLC20130116182419.0111205s2012 ncua e ber 001 0 eng 2011046545015991274Uk101580315DNLM9780786464586 (softcover : alk. paper)0786464585 (softcover : alk. paper)(OCoLC)767824989DLCengDLCYDXUKMGBYDXCPCDXNLMSBMABGNTGUtOrBLWpccNTGARC523.M665 2012WT 13M821e 2012616.8/3100323R616.831003 MOO 2012Moore, Elaine A.,1948-Encyclopedia of Alzheimer's disease :with directories of research, treatment and care facilities /Elaine A. Moore with Lisa Moore ; illustrated by Marvin G. Miller ; foreword by David Perlmutter.2nd ed.Jefferson, N.C. :McFarland,c2012.viii, 447 p. :ill. ;26 cm.Includes bibliographical references and index.The encyclopedia --Long term day care treatment centers, by state --Research facilities."The second edition contains updated resources, research institution information, a listing of treatment and care facilities, advances in Alzheimer's disease research, genetics, diagnostic procedures, treatment, alternative medicine, brain plasticity, risk factors, clinical trial information, nursing home safety, and preventive measures. The book describes medical treatments used in other countries and the results of collaborative efforts"--Provided by publisher.Alzheimer's diseaseEncyclopedias.Alzheimer's diseaseDirectories.Moore, Lisa,1973-sc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01475cam a2200433Ia 4500ocn808438897OCoLC20130614123752.0120828s2013 nyua e bf 001 0 eng d016038641Uk9780071638562 (pbk.)0071638563 (pbk.)(OCoLC)808438897MPAMPAOCLCOTEFCNMBLUKMGBIULNTGUtOrBLWNTGARG101.C94 2013WQ 100C976 201361823R618 CUR 2013Current diagnosis & treatment :obstetrics & gynecology /[edited by] Alan H. DeCherney, ... [et al.].Current diagnosis and treatment :obstetrics and gynecologyObstetrics & gynecologyObstetrics and gynecology11th ed.New York :McGraw-Hill Medical,c2013.xv, 1,024 p. :ill. (some col.) ;24 cm.Previous ed.: Current diagnosis and treatment : obstetrics & gynecology. New York : Lange/McGraw-Hill, 2007."A Lange medical book"--T.p.Includes bibliographical references and index.Obstetrics.ObstetricsDiagnosis.Gynecology.GynecologyDiagnosis.DeCherney, Alan H.Obstetrics & gynecology.sc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01946cam a2200433 a 4500ocn707248074OCoLC20130614123752.0110705s2012 njua e 001 0 eng 2011028227015803963Uk9780470648858 (pbk.)0470648856 (pbk.)(OCoLC)707248074DLCengDLCYDXBTCTAYDXCPUKMGBBONDEBBGCOONTGUtOrBLWpccNTGANA31.C44 2012720.323R720.3 CHI 2012Ching, Frank,1943-A visual dictionary of architecture /Francis D. K. Ching.2nd ed.Hoboken, N.J. 
:Wiley,c2012.viii, 328 p. :ill. ;31 cm.Includes index."Over 66 basic aspects of architecture are comprehensively covered with over 5,000 words in a visual context, to help visual thinkers clarify meanings. Comprehensive index permits the reader to locate any important word in the text. Oversized pages help present complicated material in easy-to-comprehend spreads. - Written by one of the most famous architectural authors --Frank Ching's name alone is a key selling feature for this book--he has earned the respect and trust of designers, design educators, and students around the world"--Provided by publisher.ArchitectureDictionaries.Picture dictionaries, English.ARCHITECTURE / General.bisacshArchitektur.(DE-588c)4002851-3.swdWörterbuch.(DE-588c)4066724-8.swdArchitektur.(DE-588)4002851-3.gndWörterbuch.(DE-588)4066724-8.gndCover imagehttp://catalogimages.wiley.com/images/db/jimages/9780470648858.jpgsc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01062cam a2200313I 4500ocn812172044OCoLC20130116183508.0121008s2012 ilu e 001 0 eng d97809817736740981773672(OCoLC)812172044IMDIMDYDXCPNTGUtOrBLWNTGA737.497323R737.4973 SWISwiatek, Anthony.Encyclopedia of the commemorative coins of the United States :history, art, investment & collection of America's memorial coinage /edited and compiled by Anthony J. Swiatek.Chicago, Ill. :KWS Publishers,2012.712 p. :ill. ;24 cm.Includes index.Commemorative coinsUnited States.Commemorative coinsCollectors and collectingUnited States.Coins, American.Coins, AmericanCollectors and collecting.sc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01487cam a2200421 a 4500ocn318420872OCoLC20130116201348.0111209s2012 caua e be 001 0 eng 20110501559780313349485 (hbk. : acid-free paper)0313349487 (hbk. : acid-free paper)9780313349492 (ebook)0313349495 (ebook)(OCoLC)318420872DLCengDLCBTCTAYDXCPBDXBWXIADMLYNTGUtOrBLWpccn-mx---NTGAF1210.M6175 2012972.08/4223R972.0842 MEXMexico today :an encyclopedia of life in the republic /Alex M. Saragoza, Ana Paula Ambrosi and Silvia D. Zárate, editors.Santa Barbara, Calif. :ABC-CLIO,c2012.2 v. (xxxvi, 728 p.) :ill. ;27 cm.Includes bibliographical references and index.V. 1.A-H --v. 2.I-Z.MexicoCivilizationEncyclopedias.MexicoSocial life and customsEncyclopedias.MexicoPolitics and government2000-Encyclopedias.MexicoEconomic conditions1994-Encyclopedias.Saragoza, Alex.Ambrosi, Ana Paula.Zárate, Silvia D.(Silvia Dolores)sc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle'); +INSERT INTO biblio.record_entry (marc, last_xact_id) VALUES ($BOOGADYBOOOGADYBOOOO$01644cam a2200457Ma 4500ocn779454161OCoLC20130116200050.0110808s2012 caua e be 001 0deng 20110316669780313329449 (set : alk. paper)0313329443 (set : alk. paper)9780313329456 (v. 1 : alk. paper)0313329451 (v. 1 : alk. paper)9780313329463 (v. 2 : alk. paper)031332946X (v. 2 : alk. paper)(OCoLC)779454161DLCengN15OCLCONTGUtOrBLWn-us---NTGAE169.12.E515 2012973.923R973.9 ENCEncyclopedia of the sixties :a decade of culture and counterculture /James S. Baugess and Abbe Allen DeBolt, editors.Santa Barbara, Calif. :Greenwood,c2012.2 v. (xliv, 871 p.) :ill. ;26 cm.Includes bibliographical references and index.V. 1.A-M --v. 
2.N-Z.United StatesCivilization20th centuryEncyclopedias.CountercultureUnited StatesHistoryEncyclopedias.United StatesHistory1961-1969Encyclopedias.United StatesBiographyEncyclopedias.Nineteen sixtiesEncyclopedias.CountercultureHistoryEncyclopedias.BiographyEncyclopedias.Baugess, James S.DeBolt, Abbe Allen.sc2013-01-16aC0NTG$BOOGADYBOOOGADYBOOOO$, 'kyle');
+COMMIT;
+
diff --git a/KCLS/bs_files/bs_json.bre b/KCLS/bs_files/bs_json.bre
new file mode 100644
index 0000000000..a791d7b101
--- /dev/null
+++ b/KCLS/bs_files/bs_json.bre
@@ -0,0 +1,200 @@
+{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"1","IMPORT-1377205600.55222","01381cam a2200349Ia 4500ocn798409621OCoLC20121228212212.0120628s2012 nju e 001 0 eng d 2012947904016117955Uk9781118408292 (pbk.)1118408292 (pbk.)(OCoLC)798409621UKMGBengUKMGBOCLCOSINLBILCYDXCPNTGUtOrBLWNTGA005.446523005.4465 RIZRizzo, John.OS X Mountain Lion server for dummies /by John Rizzo.Hoboken, N.J. :Wiley,c2012.xx, 380 p. ;24 cm.Includes index.OS X Mountain Lion server is the easy way to tame your network. This book shows you how to get it up and running, and how to set up all the features to harness its power for your home or office.Introduction --Getting Mountain Lion server up and running --Creating and maintaining user accounts and directories --Serving up files and printers --Facilitating user collaboration --Managing clients --The part of tens.Mac OS.Operating systems (Computers)Macintosh (Computer)Programming.lmc2012-12-21aC0NTG",null,null,"OCLC","o798409621"]}
+{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"2","IMPORT-1377205600.55222","02200cam a2200397Ma 4500ocn794816001OCoLC20130614123744.0120522s2012 cc a e 001 0 eng d016095133Uk7928801789781449316136 (pbk.)1449316131 (pbk.)(OCoLC)794816001(OCoLC)792880178UKMGBengUKMGBBTCTABDXYDXCPOCLCOJSTEINCPCOOILCUtOrBLWNTGATR267.5.A33B788 2012006.68623006.686 BRUBrundage, Barbara(Barbara K.)Photoshop elements 11 :the missing manual /Barbara Brundage.Photoshop elements eleven.1st ed.Beijing ;Sebastopol, CA :O'Reilly,2012.xxv, 630 p. :chiefly col. ill. ;24 cm.The missing manualInclude index.Whether you're a photographer, scrapbooker, or aspiring graphic artist, Photoshop Elements is an ideal image-editing tool-- once you know your way around. This guide removes the guesswork, and provides jargon-free advice and step-by-step guidance.Introduction to Elements. Finding your way around Elements ;Importing, managing, and saving photos ;Rotating and resizing photos --Elemental elements. The quick fix ;Making selections ;Layers: the heart of elements --Retouching. Basic image retouching ;Elements for digital photographers ;Retouching: fine-tuning images ;Removing and adding color ;Photomerge: creating panoramas, group shots, and more --Artistic elements. Drawing with brushes, shapes, and other tools ;Filters, actions, layer styles, and gradients ;Text in elements --Sharing images. Creating projects ;Printing photos ;Email and the Web ;Online albums and slideshows --Additional elements. Beyond the basics --Appendices. 
Installation and troubleshooting.Adobe Photoshop elements.PhotographyDigital techniques.Missing manual.lmc2012-12-21aC0NTG",null,null,"OCLC","o794816001"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"3","IMPORT-1377205600.55222","01786cam a2200397Ia 4500ocn800644386OCoLC20121228211350.0120702s2012 njua e 001 0 eng d 2012948656016122474Uk9781118374887 (pbk.)1118374886 (pbk.)(OCoLC)800644386UKMGBengUKMGBEINCPYDXCPVP@ILCBWXJO3UtOrBLWNTGA006.75423006.754 HARHarvell, Ben,1981-Facebook /by Ben Harvell.Hoboken, N.J. :John Wiley & Sons Inc.,c2012.305 p. :col. ill. ;24 cm.Teach yourself visuallyVisual read less-learn more\"The fast and easy way to learn\" --Cover.Includes index.Provides clear, step-by-step instructions for tackling more than 185 Facebook tasks. Each task-based spread covers a single technique, sure to help you get up and running on Facebook in no time.Setting up an account --Setting security --Setting privacy --Finding and organizing friends --Setting your status --Communicating with friends --Using timeline and news feed --Using groups and events --Sharing photos, video, and music --Using apps --Working with notes --Using search and notifications --Accessing Facebook mobile --Using location services on Facebook --Understanding Facebook ads and pages.Facebook (Electronic resource)Online social networks.Teach yourself visually.Visual read less, learn more.lmc2012-12-21aC0NTG",null,null,"OCLC","o800644386"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"4","IMPORT-1377205600.55222","01007cam a22003618a 4500ocn775418885OCoLC20121228220517.0120802s2012 nyu e 001 0 eng 201202924497803995376910399537694(OCoLC)775418885DLCengDLCBTCTABDXOCLCOOCOIK2YDXCPNTGUtOrBLWpccNTGABF698.35.I59D46 2012155.2/3223155.232 DEMDembling, Sophia.The introvert's way :living a quiet life in a noisy world /Sophia Dembling.1st ed.New York :Penguin Group,2012.ix, 198 p. ;19 cm.\"A Perigee book.\"Includes index.Introversion.Introverts.Interpersonal relations.JL2012-12-28aC0NTG",null,null,"OCLC","o775418885"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"5","IMPORT-1377205600.55222","01813cam a2200397 a 4500ocn774496036OCoLC20121228203031.0120531s2012 cau e b 000 0 eng 20120219439781608823482 (pbk. : alk. paper)1608823482 (pbk. : alk. paper)(OCoLC)7744960361348902QBIDLCengDLCYDXBTCTABDXOCLCOYDXCPBWXKAAQBXNTGUtOrBLWpccNTGABF637.C74L38 2012158.223158.2 LAVLavender, Neil J.Impossible to please :how to deal with perfectionist coworkers, controlling spouses, and other incredibly critical people /Neil J. Lavender, Alan Cavaiola.Subtitle on the cover:Make no mistakeOakland, CA :New Harbinger Publications,c2012.iv, 182 p. ;23 cm.Includes bibliographical references (p. 
181-182).Controlling perfectionism explained --The controlling perfectionist as romantic partner, as parent, and in the workplace --How the controlling perfectionist has affected you --Recognizing what you can and can't do --Setting limits and boundaries --Establishing better communication --Handling controlling perfectionists in romantic relationships, family life, and friendships --Handling controlling perfectionists in the workplace --Seeking qualified professional help.Criticism, Personal.Perfectionism (Personality trait)Interpersonal conflict.Interpersonal relations.Cavaiola, Alan A.AMW2012-12-28aC0NTG",null,null,"OCLC","o774496036"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"6","IMPORT-1377205600.55222","01514cam a2200397 a 4500ocn764313828OCoLC20121228091953.0120409s2012 flu e b 000 0deng 20120117069781616386122 (trade paper)1616386126 (trade paper)1616387204 (e-book)6387203 (e-book)(OCoLC)764313828DLCengDLCBTCTANTGUtOrBLWpccNTGABV5091.V6G99 2012236/.123236.1 GYWGwyn, Liz.Amazing stories of life after death /Liz Gwyn.1st ed.Lake Mary, Fla. :Charisma House,c2012.xxxi, 189 p. ;22 cm.Includes bibliographical references.Mr. Smith --Comatose --Moses --Doug's revenge --Huge reptile --Highway 21 --Aneurysm --The bear --Yes, Lord --Z-28 --A little boy, a grandpa, and Jesus --Four-year-old and an angel --RSVP --Angels in the room --Jesus in the ambulance --Cole and Jesus --Twisted car --Fiery pit --AT-6 war plane --Tunnel --Field of flowers --The cage --Warning --Leroy --His splendor.Visions.Supernatural.Miracles.Near-death experiencesReligious aspectsChristianity.bp2012-12-28aC0NTG",null,null,"OCLC","o764313828"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"7","IMPORT-1377205600.55222","01706cam a2200361Ia 4500ocn801965682OCoLC20121228081648.0120727s2012 scu e b 000 0 eng d9781470158484 (pbk.)1470158485 (pbk.)(OCoLC)801965682JBLJBLJBLNTGUtOrBLWn-us---NTGA323.32942097323323.32942 ROBRobin, Daniel K.Libertarian war on poverty :repairing the ladder of upward mobility /Daniel K. Robin.[Charleston, S.C. :CreateSpace],2012.xviii, 201 p. ;23 cm.Includes bibliographical references.The author presents his opinions and perspectives on how laws have impeded those trying to climb out of poverty and how the concepts of liberty and freedom must be the foundation for any viable solution to poverty.Why help the poor? --Champions of the poor --Making it hard to work --Migration policy and poverty --Choosing education to end poverty --The war on drugs meets the war on poverty --Occuaptional licensure and excessive regulation --Health care --Where have all the savings gone? --Housing --Economic growth : the mother of all anti-poverty programs --Freedom is the champion of the little guy.PovertyUnited States.PoorUnited States.LibertarianismUnited States.United StatesSocial policy.United StatesPolitics and government2009-bp2012-12-28aC0NTG",null,null,"OCLC","o801965682"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"8","IMPORT-1377205600.55222","01214cam a22003614a 4500ocn815456189OCoLC20130614123744.0121030s2013 njua e 001 0 eng d 201294892111183564039781118356401(OCoLC)815456189UOKUOKILCBKLSRCUtOrBLWNTGAHG179.N42693 2013332.0240028553623332.024002 NELNelson, Stephen L.,1959-Quicken 2013 for dummies /by Stephen L. Nelson.Hoboken, N.J. :Wiley,c2013.xvi, 368 p. :ill. 
;24 cm.--For dummiesIncludes index.Introduction --Zen, Quicken, and the big picture --The absolute basics --Home finances --Very serious business --The part of tens.Learn to keep your finances in order the easy way, with Quicken 2013-- and this handy guide!Quicken (Computer file)Finance, PersonalComputer programs.--For dummies.lmc2012-12-21aC0NTG",null,null,"OCLC","o815456189"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"9","IMPORT-1377205600.55222","01373cam a2200349Ia 4500ocn820738104OCoLC20121228210827.0121210s2012 cauab e 000 0 eng d73041440097819363656231936365626(OCoLC)820738104(OCoLC)730414400NYPICNYPICIAMCXPOCLCOBTCTABDXYDXCPNTGUtOrBLWn-us---NTGA362.870973 STAStabile, Gabriele.Refugee hotel /photographs by Gabriele Stabile ; text by Juliet Linderman.San Francisco :McSweeneys Books,c2012.319 p. :chiefly ill. (some col.), maps ;14 x 21 cm.Voice of witnessRefugee Hotel is a collection of photography and interviews that documents the arrival of refugees in the United States. Images are coupled with moving testimonies from people describing their first days in the U.S., the lives they've left behind, and the new communities they've since created.RefugeesUnited StatesPortraits.RefugeesUnited StatesInterviews.Documentary photography.Linderman, Juliet.Voice of witness.csr2012-12-28aC0NTG",null,null,"OCLC","o820738104"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"10","IMPORT-1377205600.55222","00879nam a2200325Ia 4500ocn811770216OCoLC20121228204922.0121003s2011 caua e 000 0aeng d97806154821250615482120(OCoLC)811770216NTGTEFUtOrBLWn-us-caNTGA363.209223363.2092 SUMSumner, Ted.Deep cover cop /Ted Sumner & Mills Crenshaw.Limited 1st ed.[California? :s.n.],c2011.vi, 401 p. :ill. (some col.) ;24 cm.Sumner, Ted.PoliceCaliforniaBiography.Undercover operationsCalifornia.Crenshaw, Mills.lmc2012-12-21aC0NTG",null,null,"OCLC","o811770216"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"11","IMPORT-1377205600.55222","01282cam a2200373 a 4500ocn793221822OCoLC20130614123744.0120518s2012 mnua e 000 0 eng 20120186379780873518703 (cloth : alk. paper)0873518705 (cloth : alk. paper)9780873518710 (e-book)0873518713 (e-book)(OCoLC)793221822DLCengDLCYDXBTCTABDXYDXCPOCLCOBWXGPIVP@NTGUtOrBLWpccn-us-mnNTGAHV6533.M6S93 2012364.152/309223364.152309 SWASwanson, William,1945-Black, white, blue :the assassination of patrolman Sackett /William Swanson.St. Paul, MN :Borealis Books,c2012.251 p. :ill. ;24 cm.Young men and murder --A very cold case --The burden of proof --Afterword --Acknowledgments.Sackett, James,1942-1970.Murder victimsMinnesotaSaint PaulCase studies.MurderMinnesotaSaint PaulCase studies.JL2012-12-28aC0NTG",null,null,"OCLC","o793221822"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"12","IMPORT-1377205600.55222","02358cam a2200397 i 4500ocn776523510OCoLC20121228224241.0120706s2012 nyuab e 001 0 eng 20120222349780316208192 (hardback)0316208191 (hardback)(OCoLC)7765235101348774QBIDLCrdaengDLCYDXBTCTABDXOCLCOYDXCPIEBIK2CDXQBXNTGUtOrBLWpccNTGAQ173.M18 201250023500 MADMad science :Einstein's fridge, Dewar's flask, Mach's speed, and 362 other inventions and discoveries that made our world /edited by Randy Alfred ; from the WIRED blog, This Day in Tech, founded by Tony Long.First edition.New York :Little, Brown and Company,2012.390 pages :illustrations, map ;22 cmtextrdacontentunmediatedrdamediavolumerdacarrierIncludes index.\"365 days of inventions, discoveries, science, and technology, from the editors of Wired Magazine. 
On January 30, Rubik applied for a patent on his cube (1975). On the next day, 17 years earlier, the first U.S. Satellite passed through the Van Allen radiation belt. On March 17, the airplane \"black box\" made its maiden voyage (1953). And what about today? Every day of the year has a rich scientific and technological heritage just waiting to be uncovered, and Wired's top-flight science-trivia book MAD SCIENCE collects them chronologically, from New Year's Day to year's end, showing just how entertaining, wonderful, bizarre, and relevant science can be. In 2010, Wired's popular \"This Day in Tech\" blog peaked with more than 700,000 page views each month, and one story in 2008 drew more than a million unique viewers. This book will collect the most intriguing anecdotes from the blog's run-one for each day of the year-and publish them in a package that will instantly appeal to hardcore techies and curious laypeople alike. \"--Provided by publisher.ScienceMiscellanea.ScienceBlogs.Alfred, Randy,editor.JL2012-12-28aC0NTG",null,null,"OCLC","o776523510"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"13","IMPORT-1377205600.55222","01748cam a22004218i 4500ocn781432393OCoLC20121228125419.0120308s2013 ilua e b 001 0 eng c 20120102139780226682952 (hardcover : alkaline paper)0226682951 (hardcover : alkaline paper)9780226923789 (e-book)0226923789 (e-book)(OCoLC)781432393ICU/DLCrdaengCGUDLCBTCTABDXOCLCOVKCYDXCPLF3CDXABGVP@BWXORXNTGUtOrBLWpccNTGAQD13.P75 2013540.1/1223540.112 PRIPrincipe, Lawrence,author.The secrets of alchemy /Lawrence M. Principe.Chicago ;London :University of Chicago Press,2013.v, 281 pages, [8] pages of plates :illustrations (some color) ;24 cmtextrdacontentunmediatedrdamediavolumerdacarrierSynthesisIncludes bibliographical references (pages 213-271) and index.What is alchemy? --Origins : Graeco-Egyptian chemeia --Development : Arabic al-kīmiyā --Maturity : Medieval Latin alchemia --Redefinitions, revivals, and reinterpretations : alchemy from the eighteenth century to the present --The golden age : practicing chymistry in the early modern period --Unveiling the secrets --The wider world of chymistry.AlchemyHistory.Alchemists.Synthesis (University of Chicago. Press)JL2012-12-28aC0NTG",null,null,"OCLC","o781432393"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"14","IMPORT-1377205600.55222","02959cam a2200481Ia 4500ocn802556589OCoLC20121228235430.0120731s2012 enka e b 001 0 eng d016082830Uk78587004797818566974391856697436(OCoLC)802556589(OCoLC)785870047SISPLSISPLSISPLCDXBTCTABDXUKMGBYDXCPILOYNKSINLBNTGUtOrBLWn-us---NTGATS171.4.H46 2012604.223604.2 HENHenry, Kevin.Drawing for product designers /Kevin Henry.London :Laurence King,2012.208 p. :ill. ;29 cm.Portfolio skillsIncludes bibliographical references (p. 207) and index.Understanding sketching --The psychology of sketching --Defining sketching. Case study: HLB design diagrams --Tutorial: orthographic projection --Tutorial: orthographic sketching --Orientation. Case study: Gerrit Rietveld's red and blue chair --Tutorial: rotated plan method --Case study: method --Tutorial: (de)constructing the cube --Tutorial: unfolding geometry --Registration. Case study: Myto chair --Case study: Mission One Motorcycle --Tutorial: sketching a tape measure --Form. Case study: TCV display for Olivetti --Tutorial: sketching a contoured bar of soap --Tutorial: sketching the Pringle potato chip --Tutorial: sketching an athletics shoe --Line. 
Case study: DC25 vacuum cleaner --Case study: vessel ideation --Tutorial: putting line and orientation together --Exploring forms in space. Tutorial: Panton chair --Tutorial: Vållö watering can --Tutorial: sketching tools --Explaining forms in space. Tutorial: fundamentals of rendering --Tutorial: rendering simple forms --Tutorial: rendering complex forms --Case study: Fiskars garden barrow --Exploring forms in time. Case study: Dyson DC25 user's manual --Case study: Golden section information graphics --Tutorial: creating a storyboard --Tutorial: sketching a cellphone --Tutorial: sketching an exhibit --Putting it all together. Tutorial: creating a presentation.Mechanical drawingStudy and teaching (Higher)Mechanical drawingTechnique.Freehand technical sketchingComputer-aided design.DrawingPsychological aspects.DrawingPhilosophy.Rendering (Computer graphics)Visual texture recognition.Technical illustration.Industrial designUnited States21st century.Product designCase studies.Space and timeDesign.Portfolio skills.Product design.vf2012-12-28aC0NTG",null,null,"OCLC","o802556589"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"15","IMPORT-1377205600.55222","02426cam a2200337Ia 4500ocn809041596OCoLC20130614123744.0120904s2012 cau e 000 0 eng 201101936297814019315201401931529(OCoLC)809041596APLAPLNTGUtOrBLWNTGA613.0424423 613.04244 KOF 2012Koff, Ashley.Mom energy :a simple plan to live fully charged ; from the experts who coach Hollywood's most celebrated moms /Ashley Koff, Kathy Kaehler.2nd ed.Carlsbad, Calif. :Hay House,2012.xviii, 251 p. ;23 cm.\"From celebrated dietitian Ashley Koff and fitness trainer to the stars Kathy Kaehler comes Mom Energy, an exciting new way for moms to tap into their own natural and renewable sources of energy to overcome fatigue and achieve their personal health goals. Koff and Kaehler have put together a safe, sensible, flexible, and, most importantly, effective program for moms of any age--whether their kids are in diapers or heading off to college.While being a mom is undeniably rewarding, it's also one of the most physically demanding and stressful activities in modern society. In fact, one of the most common complaints from mothers is that they simply don't have the energy to do everything they want, which means they end up sacrificing one thing (usually their own health) to accomplish another. But these sacrifices aren't necessary if you follow some simple guidelines and avoid secret energy saboteurs. With discussions that cover everything from nutrition to fitness to time management, Koff and Kaehler lay out a three-part program (reorganize, rehab, and recharge) that can be molded to any lifestyle to help moms naturally up their energy levels. And with this enhanced energy, they will get all sorts of other unexpected benefits, including a stronger immune system, easier weight loss, and even better relationships. Mom Energy will teach readers what will help and what will hurt in their quest for optimum energy. 
\"--Provided by publisher.WomenHealth and hygiene.Physical fitness.Motherhood.Kaehler, Kathy.AMW2012-12-28aC0NTG",null,null,"OCLC","o809041596"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"16","IMPORT-1377205600.55222","01015cam a22003378a 4500ocn800026941OCoLC20130614123744.0120928s2013 cau e 000 0 eng 20120321089781401940478 (hardback)1401940471 (hardback)(OCoLC)800026941DLCengDLCBTCTAOCLCOBDXNTGUtOrBLWpccNTGARA776.95.N53 201361323613 NICNicolai, Jim,1968-Integrative wellness rules :a simple guide to healthy living /Jim Nicolai.Carlsbad, Calif. :Hay House,c2013.xvii, 247 p. ;24 cm.Shares insights and strategies to optimize health and create wellness.Self-care, Health.Integrative medicine.Health promotion.AMW2012-12-28aC0NTG",null,null,"OCLC","o800026941"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"17","IMPORT-1377205600.55222","01875nam a2200373Ia 4500ocn822993543OCoLC20130614123744.0121228s2013 dcu e f001 0 eng d(OCoLC)822993543WIMWIMNTGUtOrBLWn-us---NTGA629.25323629.222 FUE 2013Fuel economy guide :model year 2013 /U.S. Department of Energy, U.S. Environmental Protection Agency.Model year 2013 fuel economy guide[Washington, D.C.] :U.S. Dept. of Energy, Office of Energy Efficiency and Renewable Energy :U.S. Environmental Protection Agency,[2013]i, 41 p. ;28 cm.Cover title.\"DOE/EE-0778\"--Cover.\"The U.S. Environmental Protection Agency (EPA) and U.S. Department of Energy (DOE) produce the Fuel Economy Guide to help buyers choose the most fuel-efficient vehicle that meets their needs. Most vehicles in this guide (other than plug-in hybrids) have three fuel economy estimates: a \"city\" estimate ... a \"highway\" estimate ... a \"combined\" estimate. Estimates for all vehicles are based on laboratory testing under standardized conditions to allow for fair comparisons\"--P. i.Includes index.AutomobilesUnited StatesFuel consumption.Motor vehiclesUnited StatesFuel consumption.TrucksUnited StatesFuel consumption.United States.Department of Energy.Office of Energy Efficiency and Renewable Energy.United States.Environmental Protection Agency.http://www.fueleconomy.gov/feg/pdfs/guides/FEG2013.pdfhttp://www.fueleconomy.govlmc2012-12-28aC0NTG",null,null,"OCLC","o822993543"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"18","IMPORT-1377205600.55222","01266cam a2200373Ia 4500ocn778422547OCoLC20121228210313.0120223s2012 enka e b 001 0 eng d9781608199754 (US)1608199754 (US)9781408152645 (UK)1408152649 (UK)(OCoLC)778422547BTCTAengBTCTABDXNLEOCLCOCUVYDXCPBWXVP@NTGUtOrBLWe------n-us---NTGA636.5022223SF505.L44 2012636.50222 LEWLewis, Celia,1948-The illustrated guide to ducks and geese and other domestic fowl :how to choose them, how to keep them /[text and illustrations by Celia Lewis].Ducks and geese and other domestic fowlLondon ;New York :Bloomsbury Publishing,2012.160 p. :col. ill. ;26 cm.Includes bibliographical references and index.Poultry.Poultry breeds.PoultryEuropePictorial works.PoultryUnited StatesPictorial works.csr2012-12-28aC0NTG",null,null,"OCLC","o778422547"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"19","IMPORT-1377205600.55222","01828cam a2200349Ia 4500ocn793585022OCoLC20130614123744.0120514s2011 scu e b 000 e eng d9781468012507 (pbk.)1468012509 (pbk.)(OCoLC)793585022YDXCPYDXCPBDXCOOJAONTGUtOrBLWNTGAQL82.H25 2012639.922639.9 HANHance, Jeremy Leon.Life is good :conservation is an age of mass extinction /Jeremy Leon Hance.[Charleston, S.C.] :Createspace,c2011.x, 204 p. ;c 21 m.Includes bibliographical references (p. 
196-201)Introduction : the life emergency --Meeting Tam in Borneo : our last chance to save the world's smallest rhino --Will jellyfish take over the world? --Why top predators matter --The camera trap revolution : how a simple device is shaping research and conservation worldwide --Nature's greatest spectacle faces extinction --The penguin problem, or stop eating our fish! --What if Noah had left behind the ugly ones? --Zoos : why a revolution is necessary to justify them --The end of the oceans : from bounty to empty --Language and conservation : why words matter --Saving the world's weirdest mammal --Shifting baselines : forgetting the lost --Gone : extinction over the past decade --The anaconda and the fer-de-lance : one day on Suriname's jungle coast.Wildlife conservation.Nature conservation.Endangered species.Extinction (Biology)NatureEffect of human beings on.bp2012-12-28aC0NTG",null,null,"OCLC","o793585022"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"20","IMPORT-1377205600.55222","02003cam a2200361Ia 4500ocn794040293OCoLC20130614123744.0120519s2012 maua e 001 0 eng d016126998Uk97814405517961440551790(OCoLC)794040293BTCTAengBTCTABDXUKMGBCPLYDXCPUtOrBLWNTGATX147.D463 2012640 DENDenholtz, Charlotte.The modern-day pioneer :simple living in the 21st century /Charlotte Denholtz.Avon, MA :Adams Media,c2012.303 p. :ill. ;19 cm.\"Contains material adapted and abridged from: The everything guide to root cellaring, by Catherine Abbot ...; The everything small-space gardening book, by Catherine Abbot ...; The everything guide to living off the grid, by Terri Reid ...; The everything soapmaking book, 2nd ed., by Alicia Grosso; The everything candlemaking book, by M.J. Abadie ...; The everything vegetarian cookbook, by Jay Weinstein ...; The everything soup, stew, & chili cookbook, by Belinda Hulin ...; The everything cast-iron cookbook, by Cinnamon Cooper ...; The everything guide to food remedies, by Lorie Rice ...; The everything quilting book, by Sandra Detrixhe ...; The everything guide to herbal remedies, by Martha Schindler Connors with Larry Alshuler ...; The everything bread cookbook, by Leslie Bilderback ...; The everything homebrewing book, by Drew Beechum ...; The everything sewing book, by Sandra Detrixhe\"--P. [304].\"Sew, cook, quilt, farm, and craft your way to sustainable living!\"--Cover.Includes index.Home economics.Home economics, Rural.Handicraft.Sustainable living.Frontier and pioneer lifeMiscellanea.lmc2012-12-28aC0NTG",null,null,"OCLC","o794040293"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"21","IMPORT-1377205600.55222","01470cam a2200373 a 4500ocn792889585OCoLC20121228210402.0120801s2012 maua e 001 0 eng 2012024855016136002Uk8050186098157404099781603427272 (pbk.)1603427279 (pbk.)(OCoLC)792889585(OCoLC)805018609(OCoLC)815740409DLCengDLCIG#BTCTAOCLCOWIQIK2VP@UKMGBYDXCPNTGUtOrBLWNTGATX601.W45 2012641.423641.4 WEIWeingarten, Matthew.Preserving wild foods :a modern forager's recipes for curing, cannning, smoking, and pickling /Matthew Weingarten and Raquel Pelzel ; photography by Stéphanie de Rougé.North Adams, MA :Storey Pub.,c2012.256 p. :col. ill. 
;24 cm.Includes index.Coastline : gifts from the sea --Pastures & hedgeroes : grazing lands and natural borders --Gardens & fields : cultivated and harvested --Forest & woods : foraged, picked, and plucked --Banks & wetlands : freshwater depths and shores.Canning and preserving.FoodPreservation.Cooking (Natural foods)Pelzel, Raquel.JL2012-12-28aC0NTG",null,null,"OCLC","o792889585"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"22","IMPORT-1377205600.55222","01432cam a2200361 a 4500ocn771652860OCoLC20121228205933.0120106s2012 nyua e 000 0 eng 201104925297815847996271584799625(OCoLC)771652860DLCengDLCYDXBTCTABDXOCLCOYDXCPOCLCQNTGUtOrBLWpccNTGATT387.P365 2012646.2/04423646.2044 FISFishbein, Dena.The painted home by Dena /photographs by John Ellis ; [editor, Wesley Royce].New York :Stewart, Tabori & Chang,c2012.200 p. :col. ill. ;26 cm.Introduction: surround yourself with the things you love --Welcome to Seven Oaks Ranch: entryway and family room --Everyday spaces: kitchen and sunroom --Places to gather: dining room, living room, and den --Cozy nooks: little bedroom and little sewing room --Private havens: master bedroom suite --A room of one's own: hallway and upstairs bedrooms --Inspirational spaces: studio and garden --Entertaining.House furnishings.Machine sewing.Ellis, John.Royce, Wesley.Dena Designs.JL2012-12-28aC0NTG",null,null,"OCLC","o771652860"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"23","IMPORT-1377205600.55222","01563cam a2200385 a 4500ocn785873735OCoLC20121228165835.0120512s2012 cau e b s000 0 eng 2012010493016117483Uk8116476349780520270237 (cloth : acid-free paper)0520270231 (cloth : acid-free paper)(OCoLC)785873735(OCoLC)811647634DLCengDLCIG#BTCTABDXUKMGBOCLCOVP@YDXCPBWXZCUZAGPULAU@NTGUtOrBLWpccNTGARJ216.C652 2012649/.3323649.33 COBCobb-Barston, Suzanne Michaels,1978-Bottled up :how the way we feed babies has come to define motherhood, and why it shouldn't /Suzanne Barston.Berkeley :University of California Press,c2012.x, 211 p. ;22 cm.Includes bibliographical references (p. 185-211).Preconceived notions --Lactation failures --Of human bonding --The dairy queens --Damn lies and statistics --Soothing the savage breast.Discusses the issue of breast feeding and whether it is fair to judge parenting on breast vs. bottle as opposed to making the right choice for a family.Breastfeeding.BreastfeedingComplications.BreastfeedingSocial aspects.AMW2012-12-28aC0NTG",null,null,"OCLC","o785873735"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"24","IMPORT-1377205600.55222","01110cam a22003494a 4500ocn781679234OCoLC20121228095300.0120816s2012 caua e b 001 0 eng 20120328959781609945282 (pbk.)160994528X (pbk.)(OCoLC)781679234DLCengDLCBTCTABDXJAIYDXCPCDXIG#BWXVP@NTGUtOrBLWpccNTGAHD62.5.C6353 2012658.1/123658.11 COHCohan, Peter S.,1957-Hungry start-up strategy :creating new ventures with limited resources and unlimited vision /Peter S. Cohan.San Francisco :Berrett-Koehler Publishers,c2012.xiv, 244 p. :ill. 
;24 cm.Includes bibliographical references and index.New business enterprises.Strategic planning.Venture capital.Entrepreneurship.bp2012-12-28aC0NTG",null,null,"OCLC","o781679234"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"25","IMPORT-1377205600.55222","01296cam a2200349Ia 4500ocn778419462OCoLC20130614123745.0120223s2012 txua e b 001 0 eng d9781937856106 (pbk.)1937856100 (pbk.)(OCoLC)778419462BTCTAengBTCTABDXKNJYDXCPGK7OCLCOCXPIFJCDXNTGUtOrBLWNTGAHF5415.5.F59 2012658.81223658.812 FLYFlynn, Anthony,1983-Custom nation :why customization is the future of business and how to profit from it /Anthony Flynn, Emily Flynn Vencat ; with Dennis Flynn.Dallas Texas :BenBella Books, Inc.,c2012.xii, 240 p. :ill. ;21 cm.Includes bibliographical references (p. 193-202) and index.Explains how customization can make any business stand apart and generate market share, increase profit margins, and develop customer loyalty.Market segmentation.Entrepreneurship.Success in business.Vencat, Emily Flynn.Flynn, Dennis C.AMW2012-12-28aC0NTG",null,null,"OCLC","o778419462"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"26","IMPORT-1377205600.55222","01408cam a2200373Ia 4500ocn781679065OCoLC20130614123745.0120321s2012 ii a e 000 0aeng d97893803401739380340176(OCoLC)781679065BTCTAengBTCTAYDXCPKNJNTGUtOrBLWengtama-ii---NTGA741.092 TEJTejubehan(Singer)Drawing from the city :based on the oral stories of /Tejubehan ; original Tamil text: Saalai Selvam ; English text: V. Geetha & Gita Wolf.Chennai, India :Tara Books,c2012.1 v. (unpaged) :ill. ;37 cm.Teju Behan is a singer and self taught urban folk artist from Ahmedabad in western India who describes her life of poverty until a job working as a singer with a fellow artist led her to discover her own artistic talent.Tejubehan(Singer)Folk artistsIndiaAhmadābādBiography.Women artistsIndiaAhmadābādBiography.SingersIndiaAhmadābādBiography.Folk artIndiaAhmadābād.Selvam, Saalai.Kītā, Va.Wolf, Gita.csr2012-12-28aC0NTG",null,null,"OCLC","o781679065"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"27","IMPORT-1377205600.55222","01158cam a2200325Ma 4500ocn809154144OCoLC20121228213702.0040813s2012 gaua e 6 000 1deng 9781603092654160309265X(OCoLC)809154144AU@engAU@OCLCOUtOrBLWn-us---NTGA741.597322741.56973 KOL V.4Kochalka, James.American elf :the collected sketchbook diaries of James Kochalka.Book four,January 1, 2008 to December 31, 2011.Marietta, Ga. :Top Shelf,c2012.1 v. (unpaged) :chiefly ill. ;22 cm.Collects five years of the semi-autobiographical online comic strip diary American Elf in which the author depicts himself as an elf.Kochalka, JamesDiariesComic books, strips, etc.CartoonistsUnited StatesDiariesComic books, strips, etc.Comic books, strips, etc.Graphic novels.lmc2012-12-28aC0NTG",null,null,"OCLC","o809154144"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"28","IMPORT-1377205600.55222","02374cam a2200445Ia 4500ocn795182099OCoLC20130111155049.0120530s2012 orua e 6 000 f eng d016099256Uk9781595829429 (pbk.)1595829423 (pbk.)(OCoLC)795182099UKMGBengUKMGBBDXOCLCQNTGUtOrBLWengjpnNTGA741.523741.5952 MIU V.36Miura, Kentarō.Beruseruku.EnglishBerserk.36 /by Kentaro Miura ; translation, Duane Johnson ; lettering and retouch, Replibooks.Milwaukie, Or. :Dark Horse Manga ;London :Diamond [distributor],2012.1 v. (unpaged) :chiefly ill. ;19 cm.Translated from the Japanese with katakana and English sound effects.\"First published in Japan in 2011 by Hakusensha, Inc., Tokyo\"--T.p. 
verso.\"English-language translation © 2012 by Dark Horse Comics, Inc. and Digital Manga Inc.\"--T.p. verso.\"This collection is translated into English but oriented in right-to-left reading format, as originally published\"--P. [4] of cover.\"Parental advisory: explicit content\"--P. [1] of cover.\"For readers 18+. For mature readers\"--P. [4] of cover.\"What Guts the Black Swordsman and his companions had seen as an island refuge from monstrous sea creatures and the ghost ship of the demonic Bonebeard is in fact the very lair of the malefic sea god that controls them. With no avenue of escape, Guts must once again don the Berserker armor to give his company any chance of survival. But without the protective influence of the sorceress Schierke, Guts risks being forever lost within the cursed armor and becoming an even greater threat to his comrades\"--P. [4] of cover.SwordsmenComic books, strips, etc.Imaginary wars and battlesComic books, strips, etc.Fantasy comic books, strips, etc.Graphic novels, JapaneseTranslations into English.localComic books, strips, etc.Japan.Johnson, Duane,1976-trlslh2013-01-11aC0NTG",null,null,"OCLC","o795182099"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"29","IMPORT-1377205600.55222","01313cam a2200361 a 4500ocn775416420OCoLC20121228003207.0120416s2012 quca e 6 000 0 eng 201290242369781770460874177046087X(OCoLC)775416420NLCengNLCBTCTACDXVP@YDXCPOCOBDXBKXBWXZCULIVNTGUtOrBLWn-us-nyNTGAPN6727.T65N49 2012741.5/97323741.5973 TOMTomine, Adrian,1974-New York drawings :a decade of covers, comics, illustrations, and sketches from the pages of The New Yorker and beyond /Adrian Tomine.1st ed.Montreal :Drawn & Quarterly,2012.175 p. :chiefly col. ill. ;29 cm.Includes drawings previously published in The New Yorker.Collects the artist's comics, illustrations, and covers produced for The New Yorker magazine, as well as other uncollected works inspired by New York City.New York (N.Y.)Comic books, strips, etc.Graphic novels.New Yorker.vf2012-12-27aC0NTG",null,null,"OCLC","o775416420"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"30","IMPORT-1377205600.55222","01631cam a22004214a 4500ocn759908528OCoLC20121228152849.0111213s2012 nyua e b 001 0 eng 2011046776016102620Uk9780307587107 (alk. paper)030758710X (alk. paper)9780307587114 (ebook)0307587118 (ebook)(OCoLC)759908528DLCengDLCYDXBTCTABDXYDXCPUKMGBOCLCOBWXCZACDXNTGUtOrBLWpccNTGAHF5439.H27R37 2012745.506823745.5068 RANRand, Kelly,1979-Handmade to sell :Hello Craft's guide to owning, running, and growing your crafty biz /by Kelly Rand ; with Christine Ernest ... [et al.] ; illustrations by Jaime Zollars.1st ed.New York :Potter Craft,c2012.175 p. :ill. 
;21 cm.Includes bibliographical references and index.An all-encompassing guide to starting and running a successful craft business draws on the expertise of the well-known nonprofit trade organization and provides authoritative coverage of everything from developing successful product lives and preparing taxes to forming LLCs.SellingHandicraft.HandicraftMarketing.Small businessManagement.Ernest, Christine.Zollars, Jaime.vf2012-12-27aC0NTG",null,null,"OCLC","o759908528"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"31","IMPORT-1377205600.55222","01622cam a2200409Ia 4500ocn751834983OCoLC20121228205355.0110823s2011 ohua e 001 0 eng dGBB1A5955bnb015885121Uk7072124977600823677946926829781440315244 (hbk.)1440315248 (hbk.)(OCoLC)751834983(OCoLC)707212497(OCoLC)760082367(OCoLC)794692682UKMGBengUKMGBWIQHCOYDXCPBKXQBXTOHBDXUKWOHBTCTABWXNTGUtOrBLWNTGAND1351.6.A77 2011ARTeflch758.173092223758.173092 ARTArt journey America :landscapes : 89 painters' perspectives /edited by Kathy Kipp.Landscapes :89 painters' perspectives1st ed.Cincinnati, Ohio :North Light Books,c2011.192 p. :col. ill. ;31 cm.Includes index.Showcasing the work of 100 top contemporary American master artists of our day, this book features landscapes from all across the country - east and west, north and south - rendered in watercolour, oil, acrylic, pastel and mixed media.Landscape painting, American20th century.Landscape painting, American21st century.Landscape paintersUnited StatesInterviews.Kipp, Kathryn,1946-North Light Books (Firm)csr2012-12-28aC0NTG",null,null,"OCLC","o751834983"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"32","IMPORT-1377205600.55222","01323cam a2200385Ia 4500ocm58602313 OCoLC20130215212952.0jstn r sd fungnnmm|||050223s2004 cauag e 000 0 eng d9780739036358073903635103808123297323197Alfred Pub. Co.23198Alfred Pub. Co.(OCoLC)58602313RBNRBNWTXMDYBAKERBTCTAORXBDXYDXCPNTGUtOrBLWNTGAMT7.S87A44x 2004781.2SURMANI781.2 SURSurmani, Andrew.Alfred's essentials of music theory :a complete self-study course for all musicians /Andrew Surmani, Karen Farnum Surmani, Morty Manus.Essentials of music theoryVan Nuys, CA :Alfred Pub. Co.,c2004.151 p. :ill., music ;30 cm. +2 sound discs (digital ; 4 3/4 in.)\"For pianists, guitarists, instrumentalists, vocalists, songwriters, arrangers & composers\"--Cover.Music theorySelf-instruction.Surmani, Karen Farnum.Manus, Morton.csr2013-02-15aC0NTG",null,null,"OCLC","o58602313"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"33","IMPORT-1377205600.55222","01208cam a2200325 a 4500ocn798059648OCoLC20121228154005.0121109s2012 cau e b 001 0 eng 20120409049781593765118 (pbk.)1593765118 (pbk.)(OCoLC)798059648DLCengDLCIG#BTCTABDXYDXCPOCLCOBWXNTGUtOrBLWNTGAPN1997.P793R43 2012791.43/7223791.43023 REBRebello, Stephen.Alfred Hitchcock and the making of Psycho /Stephen Rebello.Berkeley, CA :Soft Skull Press,c2012.288 p. ;23 cm.Includes bibliographical references (p. 
[269]-273) and index.The awful truth --The novel --The director --The deal --The screenplays --Preproduction --Shooting --Postproduction --Publicity --The release --Afterglow and aftermath.Psycho (Motion picture : 1960)Hitchcock, Alfred,1899-1980Criticism and interpretation.jab2012-12-28aC0NTG",null,null,"OCLC","o798059648"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"34","IMPORT-1377205600.55222","02267cam a2200349 a 4500ocn776526126OCoLC20121228182145.0120503s2012 nyua e 000 0ceng 2012016146016131546Uk97814555161311455516139(OCoLC)776526126DLCengDLCYDXBTCTABDXYDXCPUKMGBOCLCOWIHRCJZAGCDXQBXCGPNTGUtOrBLWNTGAGV697.A1J478 2012796.08992423796.089924 JEWJewish jocks :an unorthodox hall of fame /edited by Franklin Foer and Marc Tracy.1st ed.New York :Twelve,2012.xiv, 285 p. :ill. ;24 cm.With contributions from celebrated Jewish writers including David Remnick, Jonathan Safran Foer, and Dahlia Lithwick, provides an overview of the most influential Jewish figures in sports, from Howard Cosell to Sandy Koufax.Daniel Mendoza : the king's pugilist /Simon Schama --Max Nordau : philosopher of the muscle Jews /Timothy Snyder --Barney Sedran : tiny baller /Rebecca Newberger Goldstein --Benny Leonard : Mama said knock you out /Franklin Foer --Mose Solomon : the hunt for the Hebrew Ruth /Robert Weintraub --Whitey Bimstein : cutman /Douglas Century --Sidney Franklin : matador from Flatbush /Tom Rachman --Arnold Rothstein : American Shylock /Ron Rosenbaum --Barney Ross : Kaddish for a welterweight /Buzz Bissinger --Marty Reisman : ping-pong wizard /Howard Jacobson --Hank Greenberg : the plot against Greenberg? /Ira Berkow --Helene Mayer : fencing for Hitler /Joshua Cohen --Al Rosen : I'm not Greenberg /David Margolick --Sid Luckman : Hebrew mind, cossack body /Rich Cohen --Grigory Novak : Soviet strongman /David Bezmozgis --Jack Molinas : the point-shaver /Chad Millman --Dolph Schayes : power forward /Marc Tracy --Red Auerbach : the coach who never paid retail /Steven PinkerJewish athletesBiography.Foer, Franklin.Tracy, Marc.vf2012-12-28aC0NTG",null,null,"OCLC","o776526126"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"35","IMPORT-1377205600.55222","01050cam a2200301Ia 4500ocn820193187OCoLC20130614123745.0121204s2012 cau e b 000 0 eng d9780983632948 (pbk.)0983632944 (pbk.)(OCoLC)820193187GO6GO6GO6NTGUtOrBLWNTGA808.3808.3 LOWLowenkopf, Shelly.The fiction writer's handbook :[the definitive guide to McGuffins, red herrings, shaggy dogs, and other literary revelations from a master] /Shelly Lowenkopf ; foreword by Christopher Moore.Los Angeles, CA :White Whisker Books,2012.334 p. ;23 cm.Includes bibliographical references (p. 329-333).Short entries describing the terms and processes used in writing fiction.FictionTechnique.Creative writing.vf2012-12-28aC0NTG",null,null,"OCLC","o820193187"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"36","IMPORT-1377205600.55222","02291cam a2200385 a 4500ocn793911008OCoLC20130614123745.0120511s2012 arua e b 000 0 eng 2012018195016230529Uk1935106503 (hardcover : alk. paper)9781935106500 (hardcover : alk. paper)(OCoLC)793911008DLCengDLCYDXBTCTAYDXCPBWXBDXCDXORXKCPIXAUKMGBVP@UtOrBLWpccNTGAPS3566.O663A6 2012813/.5423818.54 PORPortis, Charles.Works.Selections.2012Escape velocity :a Charles Portis miscellany /edited and with an introduction by Jay Jennings ; cover art and illustrations by Mike Reddy.Little Rock, Ark. :Butler Center for Arkansas Studies,c2012.xxii, 358 p. :ill. 
;24 cm.Includes bibliographical references.Elected newspaper reporting and writing. Memphis commercial appeal(1958) --Arkansas gazette(1959-1960) --New York herald tribune(1960-1964) --General assignment --Rights reporting --London bureau --Travels --That new sound from Nashville --An auto odyssey through darkest Baja --The forgotten river --Motel life, lower reaches --Short stories. Your action line --Nights can turn cool in Viborra --I don't talk service no more --The wind bloweth where it listeth --Memoir. Combinations of Jacksons --Drama. Delray's new moon --Epilogue. interview[s] --Gazette project interview (by Roy Reed) --Tributes. Comedy in earnest (by Roy Blount Jr.) --Like Cormac McCarthy, but funny (by Ed Park) --Our least-known great novelist (by Ron Rosenbaum) --Afterword to True grit (by Donna Tartt) --The book that changed my life: Gringos (by Wells Tower).Brings together Portis' writings other than his four novels, including journalism, travel stories, short fiction, memoir, and even a play.Portis, CharlesAppreciation.Jennings, Jay,1957-Reddy, Mike.lmc2012-12-21aC0NTG",null,null,"OCLC","o793911008"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"37","IMPORT-1377205600.55222","01306cam a2200349 a 4500ocn777660045OCoLC20121228181933.0120529s2012 nyua e b 000 0 eng 20120216299781590515662 (hc. : acid-free paper)1590515668 (hc. : acid-free paper)9781590515679 (ebook)1590515676 (ebook)(OCoLC)777660045DLCengDLCYDXBTCTABDXOCLCOYDXCPOPWNTGUtOrBLWNTGAPQ2631.R63Z78925 2012843/.91223843.912 MUHMuhlstein, Anka.Monsieur Proust's library /Anka Muhlstein.New York :Other Press,c2012.xiv, 141 p. :ill. ;22 cm.Includes bibliographical references.First impressions and lasting influences --Foreign incursions --Good readers and bad readers --A homosexual reader: Baron de Charlus --Racine: a second language --The Goncourts --Bergotte: the writer in the novel.Proust, Marcel,1871-1922Books and reading.Proust, Marcel,1871-1922Characters.vf2012-12-28aC0NTG",null,null,"OCLC","o777660045"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"38","IMPORT-1377205600.55222","02410cam a2200433Ia 4500ocn783162529OCoLC20130614123745.0120330s2012 enkab e b 001 0 eng d 2011945993016122239Uk8132225789780500051733 (cloth)0500051739 (cloth)(OCoLC)783162529(OCoLC)813222578BTCTAengBTCTABDXUKMGBYDXCPBWKYNKHLSOCLCOHHOCDXORXAU@BWXJCUOCLCOYYPCUTMUUNTGUtOrBLWf-ua---NTGADT73.T25K46 2012932.01423932.014 KEMKemp, Barry J.The city of Akhenaten and Nefertiti :Amarna and its people /Barry Kemp.London :Thames & Hudson,c2012.320 p. :ill. (some col.), maps ;26 cm.New aspects of antiquityIncludes bibliographical references (p. 306-313) and index.The ancient site of Tell el-Amarna in Middle Egypt was the capital city of the heretic pharaoh Akhenaten and his chief consort, Nefertiti. Occupied for just sixteen or so years in the fourteenth century BC, the city lay largely abandoned and forgotten until excavations over the last hundred years brought it back into prominence. 
Based on more than three decades of research and excavation by Barry Kemp, this account provides new insight into Amarna and its people.The author brings to life the royal family and their offspring, including Tutankhamun, as well as prominent citizens such as the high priest Panehsy, the vizier Nakht, the general Ramose, and the sculptor Thutmose.The Cast of characters --City of the horizon --Building a vision --Akhenaten's resources --The city of the Sun-God --The apartments of the Pharaoh --City of people --The quality of life --Spiritual life at Amarna --What kinds of city? --An end and a beginning.Tell el-Amarna (Egypt)History.Akhenaton,King of Egypt.Nefertiti,Queen of Egypt,active 14th century B.C.PharaohsBiography.EgyptHistoryEighteenth dynasty, ca. 1570-1320 B.C.New aspects of antiquity.csr2012-12-28aC0NTG",null,null,"OCLC","o783162529"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"39","IMPORT-1377205600.55222","01890cam a2200457 a 4500ocn778636572OCoLC20121228201118.0120213s2012 nyu e b 001 0 eng 2012004336016167579Uk8099360499780199916986 (alk. paper)0199916985 (alk. paper)9780199916993 (ebk.)0199916993 (ebk.)(OCoLC)778636572(OCoLC)809936049DLCengDLCYDXBTCTAYDXCPOCLCOUKMGBBWXCUTVVCCOONTGUtOrBLWpcca-is---awba---NTGAJZ5540.G67 2012956.05092223956.050922 GOPGopin, Marc.Bridges across an impossible divide :the inner lives of Arab and Jewish peacemakers /Marc Gopin.New York :Oxford University Press,c2012.ix, 241 p. ;22 cm.Includes bibliographical references and index.Pt. 1.Self examination and identity. Ibtisam Mahameed: an introductory Palestinian case study. Eliyahu McLean: an introductory Israeli Jewish case study --pt. 2.Peacemakers in their own words. Sheikh Abdul Aziz Bukhari. Gabriel Meyer. Ihab Balha. Hind Kabawat. Marc Gopin --pt. 3.The inner life of peacemakers. A summary of the peacemakers' transformative qualities. The inner life of the peacemaker and the future of global changePeace-buildingIsrael.Peace-buildingPalestine.Reconciliation.Conflict management.CommunicationSocial aspects.Arab-Israeli conflict1993-Peace.IsraelEthnic relations.JL2012-12-28aC0NTG",null,null,"OCLC","o778636572"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"40","IMPORT-1377205600.55222","01996cam a2200361 a 4500ocn778074340OCoLC20121228202404.0120222s2012 nyua e b 000 0deng 20120049979780374298807 (hbk.)0374298807 (hbk.)(OCoLC)778074340DLCengDLCIG#BTCTABDXUKMGBOCLCOJAIYDXCPBURCDXZQPBWXNTGUtOrBLWNTGAPR4582.G88 2012823/.823B DICKENSGottlieb, Robert,1931-Great expectations :the sons and daughters of Charles Dickens /Robert Gottlieb.Sons and daughters of Charles Dickens1st ed.New York :Farrar, Straus and Giroux,2012.239 p. :ill. ;22 cm.Includes bibliographical references (p. [243-244]).Charles Dickens, famous for the indelible child characters he created--from Little Nell to Oliver Twist and David Copperfield--was also the father of ten children (and a possible eleventh). What happened to those children is the fascinating subject of Robert Gottlieb's Great Expectations. With sympathy and understanding he narrates the highly various and surprising stories of each of Dickens's sons and daughters, from Kate, who became a successful artist, to Frank, who died in Moline, Illinois, after serving a grim stretch in the Royal Canadian Mounted Police. 
Each of these lives is fascinating on its own; together they comprise a unique window on Victorian England as well as a moving and disturbing study of Dickens as a father and as a man.--From publisher description.Dickens, Charles,1812-1870Family.Children of authorsEngland19th centuryBiography.Authors, English19th centuryFamily relationships.csr2012-12-28aC0NTG",null,null,"OCLC","o778074340"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"41","IMPORT-1377205600.55222","01636cam a2200457 i 4500ocn818985184OCoLC20121228085108.0121115t20122012miuaf e 000 0aeng 20120423898110071489780310740612 (hardcover)0310740614 (hardcover)(OCoLC)818985184(OCoLC)811007148DLCengrdaDLCOCLCOOEMBTCTAZQPUPZOPWPCXYDXCPTXANTGUtOrBLWpccn-us---NTGAGV460.2.D68A3 2012796.44092B23B DOUGLASDouglas, Gabrielle,1995-Grace, gold & glory :my leap of faith /Gabrielle Douglas ; with Michelle Burford.Grace, gold and gloryGrand Rapids, Michigan :Zondervan,[2012]©2012222 pages, [8] pages of plates :color illustrations ;24 cmtextrdacontentunmediatedrdamediavolumerdacarrierThe U.S. gymnast all-around gold medal winner at the 2012 London Olympics tells her story of faith, perseverance, and determination.Douglas, Gabrielle,1995-Women gymnastsUnited StatesBiography.Women Olympic athletesUnited StatesBiography.GymnastsUnited StatesBiography.Olympic athletesUnited StatesBiography.Burford, Michelle,author.bp2012-12-28aC0NTG",null,null,"OCLC","o818985184"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"42","IMPORT-1377205600.55222","00880nam a2200301Ka 4500ocn814290313OCoLC20130614123745.0121024s2013 flua e 001 0 eng d9781618100825 (hc)1618100823 (hc)(OCoLC)814290313APLAPLNTGUtOrBLWNTGA613.043223E613.0432 CLECleland, Jo.Clean teeth, dirty teeth /Jo Cleland.Vero Beach, Fla. :Rourke Educational Media,c2013.24 p. :col. ill. ;21 cm.Healthy habitsIncludes bibliographical references (p.24) and index.ChildrenHealth and hygiene.Sing and read.Healthy habits.AMW2012-12-28aC0NTG",null,null,"OCLC","o814290313"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"43","IMPORT-1377205600.55222","00862nam a2200301Ka 4500ocn814290240OCoLC20130614123745.0121024s2013 flua e 001 0 eng d9781618100818 (hc)1618100815 (hc)(OCoLC)814290240APLAPLNTGUtOrBLWNTGA613.043223E613.0432 CLECleland, Jo.Achoo! /Jo Cleland.Vero Beach, Fla. :Rourke Educational Media,c2013.24 p. :col. ill. ;21 cm.Healthy habitsIncludes bibliographical references (p.24) and index.ChildrenHealth and hygiene.Sing and read.Healthy habits.AMW2012-12-28aC0NTG",null,null,"OCLC","o814290240"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"44","IMPORT-1377205600.55222","00880nam a2200301Ka 4500ocn814290314OCoLC20130614123745.0121024s2013 flua e 001 0 eng d9781618100801 (hc)1618100807 (hc)(OCoLC)814290314APLAPLNTGUtOrBLWNTGA613.043223E613.0432 CLECleland, Jo.Clean hands, dirty hands /Jo Cleland.Vero Beach, Fla. :Rourke Educational Media,c2013.24 p. :col. ill. ;21 cm.Healthy habitsIncludes bibliographical references (p.24) and index.ChildrenHealth and hygiene.Sing and read.Healthy habits.AMW2012-12-28aC0NTG",null,null,"OCLC","o814290314"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"45","IMPORT-1377205600.55222","01308cam a2200361Ia 4500ocn806521671OCoLC20130614123745.0120817s2012 nvu e 000 1 eng d1612184227 (pbk.)9781612184227 (pbk.)(OCoLC)806521671YDXCPYDXCPBTCTABDXOEICPPNTGUtOrBLWNTGAPS3601.N55524I36 2012ANNECHIFIC ANNECHINOAnnechino, Daniel M.I do solemnly swear /D. M. Annechino.Las Vegas, NV :Thomas & Mercer,c2012.291 p. 
;21 cm.As second in command, Katherine Ann Miles understands the responsibilities of her role as vice president. But when the president of the United States dies from a heart attack only eight months into his term, Katherine feels utterly unprepared to assume the highest office in the land.PresidentsFiction.AssassinsFiction.ConspiraciesFiction.TerrorismPreventionFiction.Washington (D.C.)Fiction.Political fiction.Suspense fiction.jab2012-12-28aC0NTG",null,null,"OCLC","o806521671"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"46","IMPORT-1377205600.55222","01931cam a22003618a 4500ocn795168262OCoLC20121228202119.0121017s2013 nyu e 000 1 eng 201203911697804252528400425252841(OCoLC)795168262DLCengDLCBTCTAGO9IUKCXPNTGUtOrBLWn-us-caNTGAPS3556.O828R63 2013813/.5423FIC FOWLERFowler, Earlene.The road to Cardinal Valley /Earlene Fowler.1st ed.New York :Berkley Prime Crime,2013.viii, 291 p. ;24 cm.\"Ruby never thought she'd return to Cardinal, but she's hoping the place and people who gave her so much can give her brother Nash-who's been drowning in drink in Nashville-the fresh start he so desperately needs. Saddlemaker Lucas McGavin is thrilled that Ruby has come back. He hasn't given up on his love for her, despite the awkward fact that she is his brother's widow, and he's well aware that this may be his last chance to win Ruby's heart. When Nash starts drinking again and ends up in a devastating accident, Ruby decides she must find her estranged mother to help with an intervention. Two states away, Etta Walker harbors a horrible secret that keeps her from reconnecting with the children she deserted so many years ago. As they struggle with the present and confront the past, Ruby, Lucas, and Etta learn the power of forgiveness...and reach for a new future filled with hope, grace, and love.\"--Dust jacket.WidowsFiction.FamiliesCaliforniaFiction.Ranch lifeCaliforniaFiction.Sierra Nevada (Calif. and Nev.)Fiction.bp2012-12-28aC0NTG",null,null,"OCLC","o795168262"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"47","IMPORT-1377205600.55222","01951cam a22004938a 4500ocn785077565OCoLC20121228115603.0121010s2012 nyu e 000 1 eng 201203774797803991606600399160663(OCoLC)785077565DLCengDLCBTCTABDXYDXCPOCLCOUPZGK5IUKNSBNTGUtOrBLWn-us---NTGAPS3557.R489137E47 2012813/.5423FIC GRIFFINGriffin, W. E. B.Empire and honor /W.E.B. Griffin and William E. Butterworth IV.New York :G. P. Putnam's Sons,c2012.517 p. ;24 cm.Honor bound series ;7In the aftermath of the surrenders of Germany and Japan in October 1945, Cletus Frade and his colleagues in the OSS are given the life-threatening task of maintaining security during a covert U.S. deal with Germany for intelligence about the identities of Soviet spies in the American atomic bomb program.United States.Office of Strategic ServicesFiction.Intelligence officersUnited StatesFiction.Frade, Cletus (Fictitious character)Fiction.World War, 1939-1945Secret serviceUnited StatesFiction.World War, 1939-1945Secret serviceSoviet UnionFiction.Atomic bombFiction.World War, 1939-1945Fiction.Spy stories.gsafdSuspense fiction.gsafdHistorical fiction.gsafdSpy stories.Suspense fiction.Historical fiction.Butterworth, William E.(William Edmund)Griffin, W. E. 
B.Honor bound ;7.bp2012-12-28aC0NTG",null,null,"OCLC","o785077565"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"48","IMPORT-1377205600.55222","01356cam a2200397Ma 4500ocn801606122OCoLC20130614123746.0120710s2012 enk e 000 f eng d016127359Uk016147236Uk9781846140495 (hbk.)1846140498 (hbk.)(OCoLC)801606122UKMGBengUKMGBOCLCOCDXPZIMLYOCLCOYDXCPNTGUtOrBLWengfreNTGAPQ843.723FIC HUGOHugo, Victor,1802-1885.Les misérables /Victor Hugo ; translated and introduced by Norman Denny.London :Penguin Classics,2012.1,231 p. ;21 cm.Translated from the French.First published in 1862.Story of Valjean, the ex-convict who rises against all odds from galley slave to mayor, and the fanatical police inspector who dedicates his life to recapturing Valjean.Ex-convictsFiction.OrphansFiction.FranceHistory19th centuryFiction.FranceHistoryJuly Revolution, 1830Fiction.Paris (France)Fiction.jab2012-12-28aC0NTG",null,null,"OCLC","o801606122"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"49","IMPORT-1377205600.55222","02029cam a22003618a 4500ocn796755120OCoLC20121228195549.0120918s2012 cau e 000 1 eng 20120356599781596923836 (hardcover)1596923830 (hardcover)(OCoLC)796755120DLCengDLCBTCTAYDXCPOCLCOBDXOCPABGCDXNTGUtOrBLWNTGAPS3611.O3643P48 2012813/.623FIC KOENIGSDORFKoenigsdorf, Jill.Phoebe & the ghost of Chagall :a novel /by Jill Koenigsdorf.Phoebe and the ghost of ChagallSan Francisco, CA :MacAdam/Cage,c2012.356 p. ;24 cm.Phoebe is an artist making very little money designing wine labels for a winery in Sonoma. Her house is in foreclosure, she's divorced, turning forty, and beleaguered on every front. Enter Marc Chagall s ghost, visible only to her, who appears to help her retrieve one of his own paintings that Phoebe's father found during the liberation of France. Meant for Phoebe and her mother, the painting never made it into their hands. In this debut comic novel, Phoebe and Chagall hunt down the painting in the South of France with help from a cast of characters including two sisters who are witches, a San Francisco Art dealer, and a misguided French innkeeper. Their snooping also leads Chagall to a few out of the hundred paintings that went missing during his lifetime. With skill and tension this book pits characters who appreciate art for its beauty against black market art dealers, evil collectors, and the mysterious German pawn hired to deliver the goods.Women artistsFiction.Ghost stories.gsafdHumorous fiction.gsafdGhost stories.Humorous fiction.JL2012-12-28aC0NTG",null,null,"OCLC","o796755120"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"50","IMPORT-1377205600.55222","01411cam a2200373Ia 4500ocn467179667OCoLC20121228021541.0091119r20092003nyu e 000 1 eng d32018722497805535934190553593412(OCoLC)467179667(OCoLC)320187224IMFIMFBTCTAIHVORXBDXYDDNTGUtOrBLWn-us-caNTGAPS3561.O55F33 2009813/.5423FIC KOONTZKoontz, Dean R.(Dean Ray),1945-The face :a novel /Dean Koontz.Bantam Mass Market ed.New York :Bantam Books,2009, c2003.649 p. ;20 cm.\"Originally published in hardcover in the United States by Bantam Books in 2003\"--T.p. verso.A riveting tour de force of suspense, mystery, and miraculous revelation, The Face is that rare novel that entertains, provokes, and uplifts at the same time. 
It will make you laugh, It will give you chills, It will fill you with hope.Motion picture actors and actressesFiction.Hollywood (Los Angeles, Calif.)Fiction.Private security servicesFiction.Stalking victimsFiction.bp2012-12-28aC0NTG",null,null,"OCLC","o467179667"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"51","IMPORT-1377205600.55222","01267cam a2200397Ia 4500ocn793220360OCoLC20130614123746.0120501s2012 enk e 000 1 eng d9780857662781 (pbk.)0857662783 (pbk.)(OCoLC)793220360BTCTAengBTCTABDXYDXCPIEPCO2NTGUtOrBLWNTGAPR6112.Y519M47 2012823/.9223FIC LYLELyle, Anne.The merchant of dreams /Anne Lyle.Nottingham, [England] :Angry Robot,2012.522 p. ;18 cm.Night's masque ;vol. IIIn this sequel to The Alchemist of Souls, a group of renegades cause a rift among the Skraylings.AssassinsFiction.Imaginary societiesFiction.BodyguardsFiction.MagicFiction.EnglandSocial life and customs16th centuryFiction.Great BritainFiction.Historical fiction.Fantasy fiction.Lyle, Anne.Night's masque ;. 2.vf2012-12-28aC0NTG",null,null,"OCLC","o793220360"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"52","IMPORT-1377205600.55222","01107cam a2200361Ia 4500ocn807034417OCoLC20121228194114.0120816s2012 nvu e 000 1 eng d 20129110539781612184319 (pbk.)1612184316 (pbk.)(OCoLC)807034417BTCTAengBTCTAYDXCPBDXJBUORXIG$NTGUtOrBLWengicee-ic---NTGA839/.693423FIC RAGNA SIGURDARDOTTIRRagna Sigurðardóttir.Hið Fullkomma landslag.EnglishThe perfect landscape /Ragna Sigurdardottir ; translated by Sarah Bowen.Las Vegas, NV :AmazonCrossing,2012.213 p. ;21 cm.Translation of: Hið Fullkomma landslag.Art museumsIcelandFiction.ArtForgeriesFiction.IcelandFiction.Bowen, Sarah,1957-trlAMW2012-12-28aC0NTG",null,null,"OCLC","o807034417"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"53","IMPORT-1377205600.55222","00851cam a2200313Ia 4500ocn693705050OCoLC20121228161942.0101217s2010 inu e 000 f eng d69370505114502578019781450257800145025781X9781450257817(OCoLC)693705050(OCoLC)693705051YDXCPengYDXCPOCLCQBDXJBUNTGUtOrBLWNTGAFIC WILCOXWilcox, C. E.A reluctant assassin /C. E. Wilcox.Bloomington, IN :iUniverse,2010.251 p. ;24 cm.MarinesFiction.AssassinsFiction.Organized crimeFiction.vf2012-12-28aC0NTG",null,null,"OCLC","o693705050"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"54","IMPORT-1377205600.55222","01198cam a2200325Ma 4500ocn812070879OCoLC20121228220145.0120926s2013 nyu e 000 f eng d016181843Uk97807582784320758278438(OCoLC)812070879UKMGBengUKMGBOCLCOZS3LEBILCSRCNTGUtOrBLWe-gx---NTGAPS3623.I832P58 2013813.623FIC WISEMANWiseman, Ellen Marie.The plum tree /Ellen Marie Wiseman.New York :Kensington Books,c2013.387 p. 
;21 cm.In the fall of 1938, as Germany rapidly changes under Hitler's regime, 17-year-old Christine Bolz, a domestic forbidden to return to the wealthy Jewish family she works for - and to her employer's son Isaac, confronts the Gestapo's wrath and the horrors of Dachau to survive and to be with the man she loves.GermanyHistory1933-1945Fiction.Historical fiction.gsafdJL2012-12-28aC0NTG",null,null,"OCLC","o812070879"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"55","IMPORT-1377205600.55222","01898cam a2200529 a 4500ocn773667632OCoLC20130614123746.0120215s2012 nyua c 000 1 eng 20110519819780375867415 (trade)0375867414 (trade)9780375967412 (library binding)0375967419 (library binding)9780375864957 (trade paperback)0375864954 (trade paperback)9780375897900 (ebook)0375897909 (ebook)(OCoLC)773667632DLCengDLCBTCTABDXYDXCPWIQVP@NTGUtOrBLWpcclcacNTGAPZ7.B38823495Red 2012[Fic]23J BEILBeil, Michael D.The Red Blazer Girls :the secret cellar /Michael D. Beil.Secret cellar1st ed.New York :Alfred A. Knopf,c2012.274 p. :ill. ;22 cm.Red Blazer Girls ;[bk. 4]When Sophie finds a secret message in the antique fountain pen she bought for her father, she and her friends become involved in a treasure hunt devised by the pen's previous owner, whose house is full of puzzles that protect a hidden treasure.Mystery and detective stories.PuzzlesFiction.Buried treasureFiction.Eccentrics and eccentricitiesJuvenile fiction.ChristmasFiction.Catholic schoolsJuvenile fiction.SchoolsFiction.PuzzlesJuvenile fiction.Treasure trovesJuvenile fiction.Beil, Michael D.Red Blazer Girls ;bk. 4.jab2012-12-28aC0NTG",null,null,"OCLC","o773667632"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"56","IMPORT-1377205600.55222","01648cam a22004098a 4500ocn766606333OCoLC20121228094822.0111201s2012 nyu c 000 1 eng 20110401329780761462279 (hardcover)0761462279 (hardcover)9780761462286 (ebook)0761462287 (ebook)(OCoLC)766606333DLCengDLCBDXIUKJP3NTGUtOrBLWlcacpccNTGAPZ7.B56513Mar 2012[Fic]23J BLAKEBlake, Stephanie(Stephanie J.),1969-The Marble Queen /Stephanie J. Blake.1st ed.New York :Marshall Cavendish Children,c2012.178 p. ;22 cm.Freedom Jane McKenzie does not like following rules, especially about what girls should do, but what she wants most of all is to enter and win the marble competition at the Autumn Jubilee to prove herself worthy of the title, Marble Queen.In 1959, ten-year-old Freedom Jane McKenzie wants to enter and win the marble competition at the Autumn Jubilee and win the title of Marble Queen, but first she must convince Mama that competing with boys is okay.Marbles (Game)Juvenile fiction.FamiliesIdahoJuvenile fiction.Sex roleJuvenile fiction.ContestsJuvenile fiction.IdahoHistory20th centuryJuvenile fiction.bp2012-12-28aC0NTG",null,null,"OCLC","o766606333"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"57","IMPORT-1377205600.55222","01350cam a2200385Ia 4500ocn795173674OCoLC20121228093701.0120610s2012 nvua c 000 1 eng d97807614632690761463267(OCoLC)795173674BTCTAengBTCTABDXCO2OCLCOL@LNTGUtOrBLWNTGAPZ7.B642534Sp 2012[Fic]23J BONNETT-RAMPERSAUDBonnett-Rampersaud, Louise.The spring un-fair /by Louise Bonnett-Rampersaud ; pictures by Adam McHeffey.Spring unfair1st ed.Las Vegas, Nev. :Amazon Children's Publishing,c2012.111 p. :ill. 
;22 cm.Secret knock club ;#2Agnes and the other members of the Secret Knock Club decide to form a rock band and perform at a concert to raise funds to rent a dunk tank for the spring fair.FairsJuvenile fiction.Rock groupsJuvenile fiction.ClubsJuvenile fiction.SchoolsJuvenile fiction.McHeffey, Adam James.Bonnett-Rampersaud, Louise.Secret knock club ;2.bp2012-12-28aC0NTG",null,null,"OCLC","o795173674"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"58","IMPORT-1377205600.55222","01421cam a2200397 a 4500ocn781555963OCoLC20130614123746.0120322s2012 miub c 000 1 eng 20120058639781585368006 (hard cover)1585368008 (hard cover)(OCoLC)781555963DLCengDLCIG#BTCTABDXIK2UPZBKXQBXYDXCPNTGUtOrBLWlcacpccNTGAPZ7.D1644Qu 2012[Fic]23J DALLASDallas, Sandra.The quilt walk /by Sandra Dallas.Ann Arbor, MI :Sleeping Bear Press,c2012.213 p. :map ;21 cm.\"Based on a story in The quilt that walked to Golden\"--T.p. verso.Ages 9 and up.Ten-year-old Emmy Blue learns the true meaning of friendship--and how to quilt--while making a harrowing wagon journey from Illinois to Colorado with her family in the 1860s.Wagon trainsJuvenile fiction.Frontier and pioneer lifeJuvenile fiction.QuiltingJuvenile fiction.FriendshipFiction.FriendshipJuvenile fiction.Dallas, Sandra.Quilt that walked to Golden.jab2012-12-28aC0NTG",null,null,"OCLC","o781555963"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"59","IMPORT-1377205600.55222","01645cam a2200421 a 4500ocn776523388OCoLC20130614123746.0120821s2012 nyu c 000 1 eng 2012028738016126503Uk9780316205962 (hbk.)0316205966 (hbk.)(OCoLC)776523388DLCengDLCIG#BTCTABDXUKMGBOCOB@LOCLCONTGUtOrBLWNTGAPZ7.S456922Smp 2012[Fic]23J SELFORSSelfors, Suzanne.Smells like pirates /by Suzanne Selfors.1st ed.New York :Little, Brown,2012.369 p. ;21 cm.Smells like dog\"Homer thought membership in L.O.S.T., the mysterious Society of Legends, Objects, Secrets, and Treasures, would help him find pirate Rumpold Smeller's missing treasure. But when Homer's enemy, Lorelei, forms an evil organization called FOUND, Homer and Dog face an impossible decision: Work with Lorelei to find the prize once and for all, or abandon their lifelong quest to locate the treasure\"--Provided by publisher.Adventure and adventurersFiction.DogsFiction.PiratesJuvenile fiction.Secret societiesJuvenile fiction.Mystery and detective stories.Adventure stories.DogsJuvenile fiction.Selfors, Suzanne.Smells like Dog.vf2012-12-28aC0NTG",null,null,"OCLC","o776523388"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"60","IMPORT-1377205600.55222","02025cam a2200457 a 4500ocn709407560OCoLC20130614123746.0110223s2012 mnuab c b 000 1 eng 20110051139780761370901 (lib. bdg. : alk. paper)0761370900 (lib. bdg. : alk. paper)(OCoLC)709407560DLCengDLCIG#BTCTABDXYDXCPMLYNTGUtOrBLWlcacpccNTGAPZ7.7.S46Shh 2012741.5/97322J SHAWShaw, Murray.Sherlock Holmes and the adventure of the cardboard box /based on the stories of Sir Arthur Conan Doyle ; adapted by Murray Shaw and M.J. Cosson ; illustrated by Sophie Rohrbach and JT Morrow.Minneapolis :Graphic Universe,c2012.48 p. :chiefly col. ill., col. map ;25 cm.On the case with Holmes and Watson ;#12Includes bibliographical references (p. 47).Retold in graphic novel form, Sherlock Holmes investigates when a spinster receives a package in the mail containing two severed ears. 
Includes a section explaining Holmes's reasoning and the clues he used to solve the mystery.Doyle, Arthur Conan,1859-1930.Adventure of the cardboard boxAdaptations.Holmes, Sherlock (Fictitious character)Comic books, strips, etc.Watson, John H. (Fictitious character)Comic books, strips, etc.Detective and mystery comic books, strips, etc.Graphic novels.Comic books, strips, etc.Cosson, M. J.Rohrbach, Sophie,ill.Morrow, J. T.,ill.Doyle, Arthur Conan,1859-1930.Adventure of the cardboard box.Shaw, Murray.On the case with Holmes and Watson ;#12.slh 20130111aC0NTG",null,null,"OCLC","o709407560"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"61","IMPORT-1377205600.55222","01172cam a2200349 a 4500ocn728102161OCoLC20121228202310.0110528s2012 mnuab c b 001 0 eng 20110210189781429676458 (library binding)1429676450 (library binding)(OCoLC)728102161DLCengDLCBTCTABDXHBPOCLCONTGUtOrBLWpccNTGAQL737.C23D66 2012599.75/5523J599.7555 DORDorisi-Winget, Dianna.Snow leopards /by Dianna Dorisi-Winget ; consultant, Christina Simmons.Mankato, Minn. :Capstone Press,c2012.32 p. :col. ill., col. map ;24 cm.Edge books. Big catsIncludes bibliographical references (p. 31) and index.Explores the habitat, life cycle, physical characteristics, and behavior of snow leopards.Snow leopardJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG",null,null,"OCLC","o728102161"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"62","IMPORT-1377205600.55222","01172cam a2200361 a 4500ocn727357085OCoLC20130614123746.0110524s2012 mnuab c b 001 0 eng 20110210167811357099781429676465 (library binding : alk. paper)1429676469 (library binding : alk. paper)(OCoLC)727357085(OCoLC)781135709DLCengDLCBTCTABDXHBPOCLCONTGUtOrBLWpccNTGAQL737.C23H44 2012599.75623J599.756 HEGHegel, Claudette.Tigers /by Claudette Hegel Edge Books.Mankato, Minn. :Capstone,2012.32 p. :col. ill., col. map ;24 cm.Edge books: big catsIncludes bibliographical references and index.Explores the habitat, life cycle, physical characteristics, and behavior of tigers.TigerJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG",null,null,"OCLC","o727357085"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"63","IMPORT-1377205600.55222","01084cam a2200349 a 4500ocn708357943OCoLC20121228200031.0110321s2012 mnua c b 001 0 eng 20110108309781429676434 (library binding)1429676434 (library binding)(OCoLC)708357943DLCengDLCBTCTABDXQBXHBPOCLCONTGUtOrBLWpccNTGAQL737.C23G347 2012599.75722J599.757 GAGGagne, Tammy.Lions /by Tammy Gagne.Mankato, Minn. :Capstone,2012.32 p. :col. ill. ;24 cm.Edge books. Big catsIncludes bibliographical references (p. 31) and index.Explores the habitat, life cycle, physical characteristics, and behavior of lions.LionJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG",null,null,"OCLC","o708357943"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"64","IMPORT-1377205600.55222","01098cam a2200349 a 4500ocn708243787OCoLC20130614123746.0110321s2012 mnuab c b 001 0 eng 20110108259781429676410 (library binding)1429676418 (library binding)(OCoLC)708243787DLCengDLCBTCTABDXGO3OCLCONTGUtOrBLWpccNTGAQL737.C23G34 2012599.75/922J599.759 GAGGagne, Tammy.Cheetahs /by Tammy Gagne.Mankato, Minn. :Capstone,2012.32 p. :col. ill., col. map ;24 cm.Edge books. Big catsIncludes bibliographical references (p. 
31) and index.Explores the habitat, life cycle, physical characteristics, and behavior of cheetahs.CheetahJuvenile literature.Edge books.Big cats.vf2012-12-28aC0NTG",null,null,"OCLC","o708243787"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"65","IMPORT-1377205600.55222","01957cam a2200481Ia 4500ocn796754518OCoLC20130614123746.0120626s2012 nyua c 000 0 eng 9780345525871 (pbk.)0345525876 (pbk.)(OCoLC)796754518OWLOWLBKXNTGUtOrBLWNTGAPN6728.G28D3948 2012741.5/6/97323J741.56973 DAVDavis, Jim,1945 July 28-Garfield.SelectionsGarfield takes his licks /by Jim Davis.Ballantine Books trade pbk. ed., 1st colorized ed.New York :Ballantine,2012.94 p. :chiefly col. ill. ;23 cm.Garfield classics ;24\"Originally published in slightly different form in the United States by Ballantine Books ... in 1993\"--T.p. verso.\"His 24th book\"--Cover.Colorized versions of Garfield comic strips, featuring the adventures of Garfield the cat, Odie the dog, and their owner Jon.Garfield (Fictitious character)Comic books, strips, etc.Juvenile fiction.CatsComic books, strips, etc.Juvenile fiction.Human-animal relationshipsComic books, strips, etc.Juvenile fiction.Garfield (Fictitious character)Juvenile fiction.CatsFiction.Human-animal relationshipsJuvenile fiction.Humorous storiesJuvenile literature.Cartoons and comics.Humorous fiction.gsafdComic books, strips, etc.gsafdDavis, Jim,1945 July 28-Garfield classics ;24.Publisher descriptionhttp://catdir.loc.gov/catdir/description/random046/92090384.htmlDDG 20120801aC0NTG",null,null,"OCLC","o796754518"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"66","IMPORT-1377205600.55222","01533cam a2200421 i 4500ocn772611097OCoLC20130614123746.0120105s2013 nyua c 000 0 eng 20120004659781619132412 (hbk. : alk. paper)1619132419 (hbk. : alk. paper)9781619132467 (pbk. : alk. paper)161913246X (pbk. : alk. paper)(OCoLC)772611097DLCrdaengDLCIG#YDXCPOCLCOIHINTGUtOrBLWNTGANC783.I57 2012743.6/5723J743.657 INSInsects /[senior editor, Heather Kissock].Learn to draw insectsNew York, NY :AV2 by Weigl,[2013]32 pages :color illustrations ;26 cm.textrdacontentunmediatedrdamediavolumerdacarrierLearn to drawWhy draw? --Insects --Meet the ant --Meet the butterfly --Meet the dragonfly --Meet the firefly --Meet the grasshopper --Meet the praying mantis --Test your knowledge of insects --Draw an environment.Insects in artJuvenile literature.DrawingTechniqueJuvenile literature.Insects in art.Juvenile literature.Kissock, Heather.Learn to draw (New York, N.Y.)jab2012-12-27aC0NTG",null,null,"OCLC","o772611097"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"67","IMPORT-1377205600.55222","01759cam a2200373Ia 4500ocn793222870OCoLC20130614123746.0120503s2012 nvu e 000 1 eng d97816121843711612184375(OCoLC)793222870BTCTAengBTCTABDXHQBMR0YDXCPOCPIZ8NTGUtOrBLWe-fi---NTGA[Fic]M LEHTOLAINENLehtolainen, Leena.My first murder /Leena Lehtolainen ; translated by Owen F. Witesman.Las Vegas, NV :AmazonCrossing,2012.242 p. ;21 cm.Maria Kallio thriller\"A Maria Kallio thriller\" -- Cover.Maria Kallio has just been assigned her first murder investigation. To prove to herself and her squad that she has what it takes to be a detective, she'll have to solve the death of Tommi Peltonen. Found floating facedown at the water's edge of his Helsinki villa, Tommi had invited his choir group to spend a weekend at his retreat. But beneath the choir's seemingly tight-knit bonds seethed bitter passion and jealousy. 
As Maria sets out to determine the difference between friends and foes, she uncovers the victim's unsavory past - and motives for all seven suspects. Now it's up to her to untangle a complex set of clues before the killer strikes again.MurderInvestigationFiction.FinlandFiction.Suspense fiction.Mystery fiction.gsafdWitesman, Owen F.Lehtolainen, Leena.Maria Kallio thriller.JL2012-12-28aC0NTG",null,null,"OCLC","o793222870"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"68","IMPORT-1377205600.55222","01363cam a2200373Ia 4500ocn780479943OCoLC20121228210451.0120315s2012 azu e 000 1 eng d 2012910475819655125819655526978146420024314642002469781464200267 (trade pbk.)1464200262(OCoLC)780479943(OCoLC)819655125(OCoLC)819655526BTCTAengBTCTABDXJCWM$KUUCOCLCONVCLEBMR0NTGUtOrBLWNTGAPS3612.O2485J35 2012813/.623M LOCKELocke, Hillary Bell.Jail coach :a Jay Davidovich mystery /Hillary Bell Locke.1st ed.Scottsdale, AZ :Poisoned Pen Press,c2012.249 p. ;23 cm.When a high-profile actor is sentenced to a brief prison term for a second DUI offense, former soldier and current corporate insurance loss minimizer Jay Davidovich hires a jail coach to ensure that the actor will be able to return to work upon his release.VeteransFiction.Hollywood (Los Angeles, Calif.)Fiction.Mystery fiction.jab2012-12-28aC0NTG",null,null,"OCLC","o780479943"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"69","IMPORT-1377205600.55222","01799cam a2200397Ia 4500ocn779863792OCoLC20121228163023.0120301s2012 enk e 000 1 eng d016044313Uk8109057199780727881991 (hbk. : alk. paper)072788199X (hbk. : alk. paper)(OCoLC)779863792(OCoLC)810905719UKMGBUKMGBBDXBTCTAOCLCOBULIK2LEBGO3VP@CGPNTGUtOrBLWe-uk-enNTGAPR6063.A833S66 2012823/.9223M MASTERSMasters, Priscilla.Smoke alarm :a Martha Gunn mystery /Priscilla Masters.1st world ed.Sutton, Surrey, England :Severn House,2012.214 p. ;23 cm.Martha Gunn mysteryA deadly fire reunites coroner Martha Gunn with Detective Inspector Alex Randall. When firemen are called to an intense blaze at the Grange in Melverley, they find the bodies of Christie Barton, her daughter, Adelaide, and father-in-law, William, along with evidence that suggests the fire was started deliberately. Detective Inspector Alex Randall enlists the help of coroner Martha Gunn, but the puzzle deepens with a second house fire - the occupant, retired nurse Monica Deverill, is missing. Where is she, and what links the two fires? The answers lie in a secret buried in the past.CoronersFiction.Missing personsFiction.Shrewsbury (England)Fiction.Mystery fiction.gsafdMasters, Priscilla.Martha Gunn mystery.vf2012-12-28aC0NTG",null,null,"OCLC","o779863792"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"70","IMPORT-1377205600.55222","00765nam a2200289Ia 4500ocn820335354OCoLC20130614123747.0121205s2012 xx e 000 0 eng d97814776804451477680446(OCoLC)820335354HBPHBPNTGUtOrBLWNTGA813.623M REDHEADRedhead, Doris.The death of Amber Torley /Doris Redhead.[S.l.] :Doris Redhead,c2012.346 p. 
;23 cm.TeenagersFiction.Mystery fiction.gsafdFathers and daughtersFiction.bp2012-12-28aC0NTG",null,null,"OCLC","o820335354"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"71","IMPORT-1377205600.55222","02569cam a2200421Ii 4500ocn774491473OCoLC20121228160635.0120126s2012 ohua e b 001 0 eng d016152836Uk9781599635965 (paper)1599635968 (paper)(OCoLC)774491473BTCTAengBTCTABDXUKMGBrdaDADQBXYDXCPOHRRHOJ4NTGUtOrBLWNTGAMT67.S657 2012782.42123R070.5794 TWO 20132013 songwriter's market /Roseann Biederman, editor.Songwriter's market36th annual edition.Cincinnati, Ohio :Writers Digest Books,[2012]©2012363 pages :illustrations ;23 cmtexttxtrdacontentunmediatednrdamediavolumencrdacarrierIncludes bibliographical references and indexes.\"The most trusted guide to getting published\"--Cover.Getting started. How to use Songwriter's market ;Where should I send my songs? ;Demo recordings ;How do I submit my demo? ;How do I avoid the rip-offs? ;Submission strategies --Music biz basics. Royalties ;Copyright ;Career songwriting ;Contracts --Articles & interviews. Mitch Goldfarb /Janice Gable Bashman ; Motion creates e-motion /Pat Pattison ; Amy Stroup /Marielle Murphy ; Writing songs for the commercial market /C.J. Watson ; Social media /David McPherson ; Hillary Scott /Annie Downs ; Learning to say no /Pat Pattison ; Songwriting grants /David McPherson ; Baby boomers and songwriting /Doris Bloodsworth ; Andy Hunt /Adria Haley ; What to do when a song isn't working /C.J. Watson ; Getting through the doors /John Braheny --Managing your work. Songwriting calendar --Markets. Music publishers ;Record companies ;Record producers ;Managers & booking agents ;Music firms ;Play producers & publishers ;Classical performing arts ;Contests & awards --Resources. Organizations ;Workshops & conference ;Retreats & colonies ;Venues ;State & provincial grants ;Publications of interest ;Websites of interest ;Glossary.Popular musicWriting and publishing.Popular musicMarketingDirectories.Music tradeDirectories.Biederman, Roseann S.,editor.jab2012-12-28aC0NTG",null,null,"OCLC","o774491473"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"72","IMPORT-1377205600.55222","01998cam a2200445Ia 4500ocn788284684OCoLC20121228002712.0120419s2012 onc e 000 1 eng d2012009598X9780373696475 (pbk.)0373696477 (pbk.)(OCoLC)788284684BTCTAengBTCTABDXIHIJBLY32OCLCQMQPNLCNTGUtOrBLWn-us-laNTGAPS3604.E44374R43 2012813.623ROM DELEONDeLeon, Jana.The reckoning /Jana DeLeon.Don Mills, Ont. :Harlequin,[2012], ©2012.217 p. ;17 cm.texttxtrdacontentunmediatednrdamediavolumencrdacarrierMystere ParishHarlequin intrigue ;1380\"In a tiny bayou town with far too many wicked secrets, the sudden disappearance of a six-year-old girl is a mystery Sheriff Holt Chamberlain is determined to solve. But teaming up with Alexandria Bastin is a complication he didn't expect. Nor is he prepared to collide with the dark side of Cajun culture--and his own troubled past. The frantic search leads them to a place said to hold magic, an eerie island where Alex is also working her charm on him. At one time, he'd been forced to leave her brokenhearted. 
And now, dangerously close to the truth, he'll do anything to protect her from the evil that surrounds them--an evil that might hold the key to sending a little girl home\"--Publisher.SheriffsFiction.Women psychiatristsFiction.LouisianaFiction.Missing childrenFiction.Romantic suspense fiction.gsafdDeLeon, Jana.Mystere Parish.Harlequin intrigue ;1380.vf2012-12-27aC0NTG",null,null,"OCLC","o788284684"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"73","IMPORT-1377205600.55222","02174cam a2200433Ia 4500ocn796757680OCoLC20121228194521.0120622s2012 onc e 000 1 eng d9780373885596 (pbk.)0373885598 (pbk.)(OCoLC)796757680BTCTAengBTCTABDXIHIIUKCGPNTGUtOrBLWNTGA813/.0850823ROM HOLIDAYHoliday with a vampire 4 /Susan Krinard, Theresa Meyers and Linda Thomas-Sundstrom.Don Mills, Ont., Canada :Harlequin,c2012.363 p. ;17 cm.Harlequin nocturne ;149Includes Heather Graham's The gatekeeper, the prequel to a new quartet, The keepers: L.A.Halfway to dawn: \"Amid a supernatural war between the races, can Fiona, captain of the human special forces, and Kain, a compelling fugitive from the vampire army, risk working together to attain peace on earth?\"--Publisher.The gift: \"All Cullen McCormack wants for Christmas is the missing locket that holds the key to his immortality. But will Angelica, the woman who now flaunts it about her exquisite neck, also claim his vampire heart?\"--Publisher.Bright star: \"Immortal Dylan McCay has vowed to protect the secret origins of his species at all costs--until he meets Savannah, a passionate astronomer on the brink of discovering the elusive Christmas Star\"--Publisher.Halfway to dawn /Susan Krinard --Gift /Theresa Meyers --Bright star /Linda Thomas-Sundstrom --Gatekeeper /Heather Graham.VampiresFiction.ImmortalityFiction.Man-woman relationshipsFiction.Paranormal romance stories.Christmas stories.Krinard, Susan.Halfway to dawn.Meyers, Theresa.Gift.Thomas-Sundstrom, Linda.Bright star.Graham, Heather.Gatekeeper.Harlequin nocturne ;149.JL2012-12-28aC0NTG",null,null,"OCLC","o796757680"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"74","IMPORT-1377205600.55222","02174cam a2200409Ia 4500ocn794711954OCoLC20121228192719.0120603s2012 nyub e 000 f eng d97814555143591455514357(OCoLC)794711954BTCTAengBTCTAOMPMR0WIMGO3UPZJO3NYPOCLCONTGUtOrBLWe-uk-stNTGAPS3569.C695L35 2012813/.5423;ROM SCOTTScott, Amanda,1944-The Laird's choice /Amanda Scott.1st ed.New York :Forever,2012.xii, 386 p. :map ;18 cm.Lairds of the Loch\"Historical romance\"--Spine.Includes an excerpt from the author's book The Knight Temptress: p. [361]-386.\"Lady Andrena MacFarlan has been different since the day she was born. Possessing the power to sense others' most intimate desires, she knows her duty is to marry the man who will take the MacFarlan name as his own and help her father regain the chiefdom of their clan. But her unique gifts don't prepare her for the day when a mighty warrior suddenly enters her life. The attraction between them is undeniable -- and insatiable. Hunted by brutal enemies, the wounded Magnus Galbraith washes up on MacFarlan land where he is rescued by a laird's lovely daughter. Andrena is like no one Magnus has ever known. She has the uncanny ability to both calm and enflame him in ways he never dreamed possible. But she has other unknown-and dangerous-powers. Now, as Magnus seeks to avenge a brother and protect a king, the young beauty could prove his greatest ally-or his ultimate undoing\" -- p. 
[4] of cover.Man-woman relationshipsFiction.ScotlandHistory15th centuryFiction.Highlands (Scotland)Fiction.Love stories.gsafdHistorical fiction.gsafdScott, Amanda,1944-Lairds of the loch.AMW2012-12-28aC0NTG",null,null,"OCLC","o794711954"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"75","IMPORT-1377205600.55222","01444cam a22003618a 4500ocn795168355OCoLC20121228195332.0120912s2013 nyu e 000 1 eng 20120346709780425261224 (hardcover : alk. paper)0425261220 (hardcover : alk. paper)(OCoLC)795168355DLCengDLCBTCTAOCLCOUPZIUKJAONTGUtOrBLWNTGAPR6052.A849D63 2013823/.91423S BAXTERBaxter, Stephen.Doctor Who :the wheel of ice /Stephen Baxter.Wheel of iceAce hardcover ed.New York :Ace Books,2013.311 p. ;24 cm.Aboard the Wheel, a ring of ice and steel turning around a moon of Saturn and home to a mining colony supplying a resource-hungry Earth, the [2nd] Doctor, Jamie and Zoe become enmeshed in a critical situation. Suspected of sabatoge, they soon find themselves caught in a mystery that goes right back to the creation of the solar system. A mystery that could kill them all.Doctor Who (Fictitious character)Fiction.Science fiction.Science fiction.gsafdDoctor Who (Television program : 1963-1989)bp2012-12-28aC0NTG",null,null,"OCLC","o795168355"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"76","IMPORT-1377205600.55222","01332cam a2200409Ii 4500ocn819325422OCoLC20121228202524.0121126s2013 nyu e 000 1 eng d97803455115080345511506(OCoLC)819325422UPZengrdaUPZIUKIEPNTGUtOrBLWNTGAPS3576.A33S36 2012813/.5423S ZAHNZahn, Timothy,author.Star Wars.Scoundrels /Timothy Zahn.ScoundrelsNew York :Del Rey/Ballantine Books,2013.443 pages ;25 cmtextrdacontentunmediatedrdamediavolumerdacarrier\"Lucas books.\"Han Solo, Chewbacca, and Lando Calrissian work together on a potentially lucrative heist in the hopes of paying of Jabba the Hutt's bounty on Han's head.Solo, Han (Fictitious character)Fiction.Calrissian, Lando (Fictitious character)Fiction.Life on other planetsFiction.Star Wars fiction.Star Wars fiction.Science fiction.gsafdbp2012-12-28aC0NTG",null,null,"OCLC","o819325422"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"77","IMPORT-1377205600.55222","03358ccm a2200397Ia 4500ocm48527791 OCoLC20121228204514.0011204s1999 cauzzz e n zxx d6430950599780769292649076929264X654979008026AF9948Alfred Publ Co.(OCoLC)48527791(OCoLC)643095059SDEengSDEBAKEROCPSINLBDEBBGOCLGK8BDXYDXCPNTGUtOrBLWNTGAM32.8.C6C6 1999786.2Complete21LR 56980rvkSCO 786.2164 COMComplete advanced piano solos :music for all occasions /[Dan Coates, arranger].Van Nuys, CA :Alfred Pub. Co.,c1999.304 p. of music ;31 cm.\"The professional touch\"--Cover.Angel eyes --Anne of Green Gables. Anne's theme --Anywhere the heart goes (Meggie's theme) --Arthur's theme (Best that you can do) --As time goes by --Ashokan farewell --Beauty and the beast --Because you loved me --Can you read my mind? (Love theme from \"Superman\") --Canon in D /Pachelbel --The colors of my life --Colors of the wind --Desperado --A dream is a wish your heart makes --Evergreen --Favorite son --Forever --Friends & lovers (Both to each other) --From a distance --The greatest love of all --Happy birthday to you --Heart --Hey there --The homecoming --How could I ever know? --How do I live --How do you keep the music playing? --I believe I can fly --I can love you like that --(Everything I do) I do it for you --I don't want to miss a thing --I say a little prayer --I swear --I will always love you --If my friends could see me now? 
--If you believe --in this life --Karen's theme --Kei's song.La vie en rose --Love and marriage --Love solo --May you always --Miss Celie's blues --Misty --My one true friend --My unknown someone --Noelle's theme (The other side of midnight) --Oh! What it seemed to be --Once before I go --One moment in time --Open arms --Over the rainbow --The prayer --Ragtime --The rose --Saving all my love for you --Send in the clowns --Separate lives (Love theme from \"White nights\") --She loves me --Song from MASH (Suicide is painless) --Star Wars main theme --Summer me, winter me (Theme from \"Picasso summer\") --Tears in heaven --Tell him --That's what friends are for --Theme from Ice Castles (Through the eyes of love) --Theme from New York, New York --Time to say goodbye --Tonight I celebrate my love --Un-break my heart --Up where we belong --Valentine --We've got tonight --What's new? --The wind beneath my wings --You can always count on me.Piano music, Arranged.Popular instrumental music.Coates, Dan.Online version:Complete advanced piano solos.Miami, FL : Warner Bros. Publications, c1999(OCoLC)647113373Table of contentshttp://bvbr.bib-bvb.de:8991/F?func=service&doc_library=BVB01&doc_number=017759340&line_number=0001&func_code=DB_RECORDS&service_type=MEDIAcsr2012-12-28cC0NTG",null,null,"OCLC","o48527791"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"78","IMPORT-1377205600.55222","03288ccm a2200505Ia 4500ocn648933639OCoLC20130614123747.0100715r20101971xxumuz e abei n zxx d9780739073100073907310903808140215436268Alfred Music Pub. Co.(OCoLC)648933639BTCTAengBTCTAFQGOHXVP@NYPNTGUtOrBLWw9x1rgmrwzka01NTGAM22.JSCO 786.21645 JOPJoplin, Scott,1868-1917.Piano musicComplete piano works /Scott Joplin ; edited by Vera Brodsky Lawrence ; editorial consultant, Richard Jackson ; introduction by Rudi Blesh.Complete piano works :rags, waltzes, marchesRags, waltzes, marches[New York] :New York Public Library ;Van Nuys, CA :[Distributed by] Alfred Music Pub. Co.,c [2010?], c1971.xl, [6], 327 p. of music :ill., facsims., ports. ;31 cm.\"Produced by The New York Public Library in conjunction with Belwin-Mills Publishing Corp.\"--T.p.Originally published in 1971 under the title: The collected works of Scott Joplin.Includes editor's note and introduction in English.Rollography of Joplin works: p. 317-318; discography of 78 rpm records: p. 319-321; selective discography of 33 1/3 rpm records: p. 322-324.Includes bibliographical references and index.Original works.Great crush collision --Combination march --Harmony Club waltz --Original rags /arranged by Charles N. Daniels --Maple leaf rag --Peacherine rag --Augustan Club waltz --The easy winners --Cleopha --A breeze from Alabama --Elite syncopations --The entertainer --March majestic --The strenuous life --Weeping willow --Palm leaf rag --The favorite --The sycamore --The Cascades --The chrysanthemum --Bethena --Rosebud --Leola --Binks' waltz --Eugenia --Antoinette --Ragtime dance --Gladiolus rag --Nonpareil (None to equal) --Sugar cane --Pine apple rag --Wall Street rag --Solace --Pleasant moments --Country club --Euphonic sounds --Paragon rag --Stoptime rag --Scott Joplin's new rag --Magnetic rag --Reflection rag.Collaborative works.Swipesy /with Arthur Marshall --Sunflower slow drag /with Scott Hayden --Something doing(with Scott Hayden) --Lily Queen(with Arthur Marshall) --Heliotrope bouquet /with Louis Chauvin --Felicity rag /with Scott Hayden --Kismet rag /with Scott Hayden.Miscellaneous works.School of ragtime --Sensation /by Joseph F. 
Lamb ; arranged by Scott Joplin --Silver swan rag /attributed to Scott Joplin.Addenda (1981).Fig leaf rag --Rose leaf rag --Searchlight rag.Piano music (Ragtime)Marches (Piano)Waltzes.Piano music.Lawrence, Vera Brodsky.Jackson, Richard,1936-Blesh, Rudi,1899-1985.csr2012-12-28cC0NTG",null,null,"OCLC","o648933639"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"79","IMPORT-1377205600.55222","01485cam a2200421 a 4500ocn778421992OCoLC20130614123747.0120323s2012 nyu d 000 1 eng 20120063279780805089516 (hbk.)0805089519 (hbk.)9780805096361 (ebk.)0805096361 (ebk.)(OCoLC)778421992DLCengDLCIG#BTCTABDXOCPOCLCONTGUtOrBLWNTGAPZ7.B52859Mi 2012[Fic]23Y BJORKMANBjorkman, Lauren.Miss Fortune Cookie /Lauren Bjorkman.1st ed.New York :Henry Holt,2012.279 p. ;23 cm.Erin, a non-Chinese teenager living in San Francisco's Chinatown, ghostwrites an online advice column, but when a reply to her ex-best friend backfires, Erin's carefully constructed life takes a crazy spin.Advice columnsJuvenile fiction.Interpersonal relationsJuvenile fiction.FriendshipJuvenile fiction.Chinese AmericansJuvenile fiction.Chinatown (San Francisco, Calif.)Juvenile fiction.San Francisco (Calif.)Juvenile fiction.FriendshipFiction.Chinese AmericansFiction.vf2012-12-28aC0NTG",null,null,"OCLC","o778421992"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"80","IMPORT-1377205600.55222","01238cam a2200349Ia 4500ocn813858201OCoLC20121228090652.0121009s2013 nyub d 000 f eng d016191174Uk9780758281463 (pbk.)0758281463 (pbk.)(OCoLC)813858201UKMGBengUKMGBJQWZS3NTGUtOrBLWNTGAPZ7.E4853Cr 2013813.623Y ESTEPEstep, Jennifer.Crimson frost :a Mythos Academy novel /Jennifer Estep.New York :KTeen,c2013.389 p. :map ;21 cm.A Mythos Academy novelArrested in the middle of her first date with Logan and wrongly accused of helping the Reapers free the evil god Loki, Gwen is shocked to learn that her accuser is Logan's father and that everyone at Mythos Academy believes she is guilty.Frost, Gwen (Fictitious character)Fiction.Paranormal fiction.Young adult fiction.Estep, Jennifer.Mythos Academy novel.bp2012-12-28aC0NTG",null,null,"OCLC","o813858201"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"81","IMPORT-1377205600.55222","01391cam a2200409Ia 4500ocn810191754OCoLC20121228171237.0120918s2012 nyu e 000 1 eng d016167443Uk77841645797800619627070061962708(OCoLC)810191754(OCoLC)778416457UPZengUPZBTCTABDXUKMGBXY4YDXCPIK2VP@ZP7NTGUtOrBLWn-us-nyNTGAPZ7.G53887Luc 2012[Fic]23Y GODBERSENGodbersen, Anna.The lucky ones :a bright young things novel /Anna Godbersen.1st ed.New York :Harper,c2012.375 p. ;22 cm.Bright young things novelNew York City's latest It Girl, Cordelia Grey, is flying high with celebrity pilot Max Darby. But such a public relationship could expose some very personal secrets.Social classesJuvenile fiction.SecretsFiction.WealthJuvenile fiction.FriendshipJuvenile fiction.New York (N.Y.)History1898-1951Juvenile fiction.Godbersen, Anna.Bright young things.AMW2012-12-20aC0NTG",null,null,"OCLC","o810191754"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"82","IMPORT-1377205600.55222","01909cam a2200589 a 4500ocn709682525OCoLC20130614123747.0110804s2011 nyu d 000 1 eng 2011023301757509465978038574098203857409809780375989797037598979X(OCoLC)709682525(OCoLC)757509465DLCDLCBTCTABULTOHHBPDPLNTGUtOrBLWlcacpccNTGAPZ7.H70326Unl 2011[Fic]23Y HOLDERHolder, Nancy.Unleashed /Nancy Holder & Debbie Viguié.1st ed.New York :Delacorte Press,c2011.385 p. 
;22 cm.Wolf springs chroniclesOrphaned Kat McBride, nearly seventeen, must leave California to live with her grandfather in small-town Arkansas, where she is drawn into a paranormal world of feuding werewolf clans.SupernaturalFiction.High schoolsJuvenile fiction.SchoolsFiction.WerewolvesJuvenile fiction.Moving, HouseholdJuvenile fiction.OrphansJuvenile fiction.GrandfathersJuvenile fiction.ArkansasJuvenile fiction.Paranormal fiction.High schoolsFiction.SchoolsFiction.WerewolvesFiction.Moving, HouseholdFiction.OrphansFiction.GrandfathersFiction.ArkansasFiction.Viguié, Debbie.Holder, Nancy.Wolf Springs chronicles.edl 20111205aConnexion ImportsKCLSC0NTG",null,null,"OCLC","o709682525"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"83","IMPORT-1377205600.55222","01671cam a2200493 a 4500ocn780415630OCoLC20130614123747.0120314s2012 nyu d 000 1 eng 20120047957776024029781416998075 (pbk)1416998071 (pbk)9781416998068 (hc)1416998063 (hc)9781416998105 (ebook)1416998101 (ebook)(OCoLC)780415630(OCoLC)777602402DLCengDLCIG#BTCTABDXYDXCPUPZOCLCOJBLNTGUtOrBLWpcclcacNTGAPZ7.H70326Van 2012[Fic]23Y HOLDERHolder, Nancy.Vanquished /Nancy Holder & Debbie Viguie.1st Simon Pulse ed.New York :Simon Pulse,2012.462 p. ;21 cm.Crusade trilogy ; bk. 3On the brink of the final battle against the Cursed Ones, the Salamancan hunters' internal bickering threatens their cause, and Jenn must try to rally her team while facing her own doubts, especially about her love for Antonio.VampiresJuvenile fiction.Guerrilla warfareJuvenile fiction.SupernaturalFiction.SistersJuvenile fiction.Horror stories.Paranormal fiction.Young adult fiction.Viguié, Debbie.Holder, Nancy.Crusade trilogy ;bk. 3.jab2012-12-28aC0NTG",null,null,"OCLC","o780415630"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"84","IMPORT-1377205600.55222","01517cam a2200421 a 4500ocn779266083OCoLC20130614123747.0120331s2012 nyu d 000 1 eng 20120082589781442434356 (hardcover)144243435X (hardcover)9781442434387 (ebook)1442434384 (ebook)9781442434370 (trade paper)1442434376 (trade paper)(OCoLC)779266083DLCengDLCIG#BTCTABDXYDXCPOCPOCLCONTGUtOrBLWlcacpccn-us-meNTGAPZ7.M4787928686Lo 2012[Fic]23Y MCNAMARAMcNamara, Amy.Lovely, dark and deep /Amy McNamara.1st ed.New York :Simon & Schuster Books for Young Readers,c2012.342 p. ;22 cm.Ages 14 up.In the aftermath of a car accident that kills her boyfriend and throws her carefully planned future into complete upheaval, high school senior Wren retreats to the deep woods of Maine to live with the artist father she barely knows and meets a boy who threatens to pull her from her safe, hard-won exile.Depression, MentalJuvenile fiction.GriefJuvenile fiction.MaineJuvenile fiction.vf2012-12-28aC0NTG",null,null,"OCLC","o779266083"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"85","IMPORT-1377205600.55222","02255cam a2200445Ia 4500ocn770297437OCoLC20121228182313.0120305s2012 nyu d 000 1 eng d 2011275889015951747Uk7459786969780758269249 (pbk.)0758269242 (pbk.)(OCoLC)770297437(OCoLC)745978696VPWengVPWDLCBTCTAUKMGBNCLHCOBKXBDXYDXCPJBLIUINTGUtOrBLWlccopycatNTGAPZ7.R252314Li 2012813.623Y REEDReed, Jaime.Living violet /Jaime Reed.New York :Dafina KTeen Books,c2012.viii, 311 p. ;21 cm.The Cambion chronicles ;bk. 1\"Kensington Publishing Corp.\"Includes a reading group guide.Includes an excerpt from Burning emerald.\"He's persuasive, charming, and way too mysterious. And for Samara Marshall, her co-worker is everything she wants most--and everything she most fears ... 
Samara Marshall is determined to make the summer before her senior year the best ever. Her plan: enjoy downtime with friends and work to save up cash for her dream car. Summer romance is not on her to-do list, but uncovering the truth about her flirtatious co-worker, Caleb Baker, is. From the peculiar glow to his eyes to the unfortunate events that befall the girls who pine after him, Samara is the only one to sense danger behind his smile. But Caleb's secrets are drawing Samara into a world where the laws of attraction are a means of survival. And as a sinister power closes in on those she loves, Samara must take a risk that will change her life forever ... or consume it\"--Publisher's description.TeenagersJuvenile fiction.High school studentsJuvenile fiction.Paranormal fiction.African American teenagersJuvenile fiction.Paranormal romance stories.Reed, Jaime.Cambion chronicles ;bk. 1.vf2012-12-28aC0NTG",null,null,"OCLC","o770297437"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"86","IMPORT-1377205600.55222","01589cam a2200457 a 4500ocn773493500OCoLC20130614123747.0120119r20122011nyu d 000 1 eng 20110465829780545429603 (hbk.)0545429609 (hbk.)97805454425100545442516(OCoLC)773493500DLCengDLCIG#BDXOCPDPLOCLCONTGUtOrBLWe-uk-wlNTGAPZ7.S43748Roc 2012[Fic]23Y SKUSESkuse, C. J.Rockoholic /C.J. Skuse.1st American ed.New York :Scholastic,2012.358 p. ;22 cm.\"First published in the United Kingdom in 2011 by Chicken House\"--T.p. verso.Sixteen-year-old Jody Flook is known for doing stupid things, but when she accidentally kidnaps her idol, rock star Jackson Gaitlin, at his only concert in the entire United Kingdom, and he does not want to leave her garage, she is in real trouble.KidnappingJuvenile fiction.FameJuvenile fiction.MusiciansJuvenile fiction.Rock musicJuvenile fiction.Best friendsJuvenile fiction.FriendshipJuvenile fiction.WalesJuvenile fiction.Best friendsFiction.FriendshipFiction.bp2012-12-28aC0NTG",null,null,"OCLC","o773493500"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"87","IMPORT-1377205600.55222","02256cam a22003738a 4500ocn401141831OCoLC20130614123748.0111128s2012 nyu d b 001 0beng 20110450499781604135220 (acid-free paper)1604135220 (acid-free paper)(OCoLC)401141831DLCengDLCBTCTABDXYDXCPOCLCOBURNTGUtOrBLWe-uk-enNTGAPR2894.F58 2012822.3/3B23YB SHAKESPEAREFoster, Brett,1973-Shakespeare's life /Brett Foster.New York :Chelsea House,c2012.256 p. :ill. (chiefly col.) ;25 cm.Backgrounds to ShakespeareA thorough biography of the Bard, featuring the latest findings from scholars. Coverage includes: Shakespeare's early years in Stratford, including his marriage to Anne Hathaway; his rise to stardom within the London theater scene; the death of his nine-year-old son, Hamnet; the writing of his greatest works, including Romeo and Juliet, Hamlet, Macbeth, and others; his retirement from the theater and move back to Stratford; and much more.--From publisher description.Shakespeare's birth and adolescence --The Royal Shakespeare Company in Stratford --Shakespeare the young man --Anne Hathaway --Shakespeare's \"lost years\" --Shakespeare's 1580s writings? 
--Shakespeare's beliefs --Shakespeare the young actor and writer --The commercial theaters of London --Shakespeare's stage --Shakespeare's early successes --The plays: aids for reading --Shakespeare's growing fame --Shakespeare's books --Shakespeare back in Stratford --Shakespeare's triumphs at the Globe --How to move a theater --The Globe Theatre today --Shakespeare the literary lion --An overlooked Shakespeare poem.Includes bibliographical references and index.Shakespeare, William,1564-1616.Dramatists, EnglishEarly modern, 1500-1700Biography.Backgrounds to Shakespeare.vf2012-12-28aC0NTG",null,null,"OCLC","o401141831"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"88","IMPORT-1377205600.55222","01572cam a22004458i 4500ocn810111518OCoLC20121228200117.0121029s2012 meua ed 000 1 eng 20120405479781410454034 (lg. print)1410454037 (lg. print)(OCoLC)810111518DLCengrdaDLCBTCTAYDXCPOCLCOGO3IEPZGVIK2NTGUtOrBLWpccNTGAPS3553.U75P68 2012b813/.5423LP FIC CUSSLERCussler, Clive.Poseidon's arrow /Clive Cussler and Dirk Cussler.Large print edition.Waterville, Maine :Wheeler Publishing,2012.671 pages (large print) :illustrations ;23 cm.textrdacontentunmediatedrdamediavolumerdacarrierA Dirk Pitt novelWhen a key element of a new and powerful attack submarine goes missing and ships begin disappearing in mid-ocean, NUMA director Dirk Pitt and his team embark on an international chase to discover the truth.Pitt, Dirk (Fictitious character)Fiction.Marine biologistsFiction.Large type books.Suspense fiction.gsafdAdventure fiction.gsafdCussler, Dirk.Cussler, Clive.Dirk Pitt adventure.bp2012-12-28qC0NTG",null,null,"OCLC","o810111518"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"89","IMPORT-1377205600.55222","01534cam a22004338a 4500ocn805701816OCoLC20130614123748.0120808r20122012meuaj ed 000 1 eng 20120320279781410451453 (lg. print)1410451453 (lg. print)(OCoLC)805701816DLCengDLCBTCTAYDXCPOCLCOIUKIEPIHINTGUtOrBLWpccn-us-ncNTGAPS3563.A679B89 2012813/.5423LP M MARONMaron, Margaret.The buzzard table /Margaret Maron.Large print ed.Waterville, Me. :Thorndike Press,2012.399 p. (large print) :ill., geneal. table ;23 cm.Thorndike Press large print mysteryA Deborah Knott mysteryA mysterious ornithologist staying at Mrs. Lattimore's Victorian home and doing research on Southern vultures seems familiar to Judge Deborah Knott and Sheriff's Deputy Dwight Bryant, especially after a murderer strikes.Knott, Deborah (Fictitious character)Fiction.Women judgesFiction.North CarolinaFiction.Large type books.Mystery fiction.gsafdLarge type books.Thorndike Press large print mystery series.bp2012-12-28qC0NTG",null,null,"OCLC","o805701816"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"90","IMPORT-1377205600.55222","01727cam a2200385 a 4500ocn803474001OCoLC20130614123748.0120731s2012 meu ed 000 1 eng 20120308179781410452269 (lg. print : hbk.)1410452263 (lg. print : hbk.)(OCoLC)803474001DLCengDLCIG#OCLCOYDXCPBTCTAMPCOCLCONTGUtOrBLWe-uk-enNTGAPR6037.A95S8 2012823/.91223LP M SAYERSSayers, Dorothy L.(Dorothy Leigh),1893-1957.Strong poison :a Lord Peter Wimsey mystery with Harriet Vane /Dorothy L. Sayers.Large print ed.Waterville, Maine :Thorndike Press,2012.407 p. (large print) ;23 cm.Thorndike Press large print famous authorsWhen her fiancé dies exactly as described in one of her novels, mystery writer Harriet Vane becomes the prime suspect. Can Lord Peter Wimsey find the real poisoner in time to save her from the gallows? Impossible, it seems. The Crown's case is watertight. 
The police are adamant. The judge's summing-up is clear: Harriet Vane is guilty. But Lord Peter is determined to find her innocent - as determined as he is to make her his wife.Wimsey, Peter, Lord (Fictitious character)Fiction.Private investigatorsEnglandFiction.Large type books.Mystery fiction.gsafdThorndike Press large print famous authors series.edl2012-12-28qC0NTG",null,null,"OCLC","o803474001"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"91","IMPORT-1377205600.55222","01334cam a2200397 a 4500ocm62343381 OCoLC20060620094516.0050324s2005 enk e 000 0 eng GBA531783bnb013175440Uk1844541290 (hbk.)UKMUKMOUNBAKERNTGUtOrBLWengfreukscpNTGA961.204209222DT236.Q2413 2005x961.204209 QADQaddafi, Muammar.Dans le concert des nations.EnglishMy vision /Muammar Gaddafi ; conversations and frank exchanges of views with Edmond Jouve ; translated into English by Angela Parfitt.London :John Blake,2005.ix, 261 p. ;24 cm.Translation of: Dans le concert des nations.Qaddafi, Muammar.Qaddafi, MuammarPhilosophy.Heads of stateLibyaBiography.LibyaForeign relations1969-LibyaPolitics and government1969-Jouve, Edmond.Parfitt, Angela..b2014243203-02-1004-17-06(2)be(2)bo(2)kt06-20-06ma-engenk03C0NTG",null,null,"OCLC","o62343381"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"92","IMPORT-1377205600.55222","01964cjm a2200469Ia 4500ocn710059261OCoLC20130614123748.0sd fungnnmmneu110323s2011 vaumunne n eng d00610583396720gtin-1461058339672026393-DSaguaro Road Records3964N2(OCoLC)710059261BTCTAengBTCTABKXTEFCPLB@LUtOrBLWcygmNTGAM2198.B55T35 2011782.5/25422CD CR BLI T93 Blind Boys of Alabama.prfTake the high road[sound recording] /Blind Boys of Alabama.[Fairfax, Va.] :Saguaro Road Records,p2011.1 sound disc :digital ;4 3/4 in.Blind Boys of Alabama; with other performers.Produced by Jamey Johnson, Chris Goldsmith, Kevin Grantt, and Chat Cromwell.Recorded principally at Ben's Studio, Nashville, Tenn.Compact disc.Take the high road(with the Oak Ridge Boys) --Jesus, hold my hand --Have thine own way, Lord(with Jamey Johnson) --I was a burden(with Lee Ann Womack) --Can you give me a drink?(with Vince Gill) --Family Bible(with Willie Nelson) --Jesus built a bridge to heaven --I know a place --Why don't you live so God can use you --Lead me home --Stand by me --I saw the light (with Hank Williams, Jr.) --The last mile of the way.Country gospel music2011-2020.localJohnson, Jamey.prfWomack, Lee Ann.prfGill, Vince.prfNelson, Willie,1933-prfWilliams, Hank,Jr.,1949-prfOak Ridge Boys.prfdjn 201204199786314208103 (BT)jC0NTG",null,null,"OCLC","o710059261"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"93","IMPORT-1377205600.55222","01440cjm a2200421Ia 4500ocn712135416OCoLC20130614123748.0sd fungnnmmneu110404s2011 tnucynne d n eng d00093624966678gtin-14093624966678523992-2Warner Bros.(OCoLC)712135416BTCTAengBTCTABKXTEFGK8UtOrBLWNTGAM1630.18.R53R53 2011782.421642781.64222CD PC RIC R92Rich, John,1974-Rich rocks[sound recording] /John Rich.Nashville :Warner Bros.,p2011.1 sound disc :digital ;4 3/4 in.Performed by John Rich.Compact disc.Lyrics in container insert.Country done come to town --You had me from hello(feat. Lil Jon) --Mack truck(feat. Kid Rock) --You rock me --Texas(feat. Cowboy Troy) --Let somebody else drive(feat. 
Hank Williams, Jr.).Country music2011-2020.Lil Jon,1971-prfKid Rock(Rapper)prfCowboy Troy,1970-prfWilliams, Hank,Jr.,1949-prfcme2011-10-149786314224332 (BT)jConnexion ImportsKCLSC0NTG",null,null,"OCLC","o712135416"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"94","IMPORT-1377205600.55222","01482cjm a2200373Ia 4500ocn515677946OCoLC20130614123748.0sd fsngnnmmneu100213s2010 maubgnne n eng d00011661064126gtin-1401166106412611661-0641-2Rounder Records(OCoLC)515677946BTCTAengBTCTATEFNTGUtOrBLWNTGAM1630.18.G73F36 2010782.42164222CD PC GRA F41Grascals (Musical group)prfThe famous Lefty Flynn's[sound recording] /the Grascals.Burlington, MA :Rounder Records,p2010.1 sound disc :digital ;4 3/4 in.Performed by Grascals.Compact disc.Last train to Clarksville --Son of a sawmill man --Satan and Grandma --Everytime --Out comes the sun --Blue rock slide --The famous Lefty Flynn's --My baby's waiting on the other side --My old friend the blues --Up this hill and down --I'm blue I'm lonesome(feat. Hank Williams Jr.) --Give me Jesus.Bluegrass music2001-2010.Williams, Hank,Jr.,1949-prf.b2379581509-19-1003-18-10befwkgmvsa04-29-10mj-engmau459786313524099 (BT)cmeC0NTG",null,null,"OCLC","o515677946"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"95","IMPORT-1377205600.55222","03126cjm a2200685Ia 4500ocn472481247OCoLC20130614123748.0sd fungnn|||ed091203s2010 caumun e eng d509996078802307880Grammy Recordings/Capitol50999 6 07880 2 3Grammy Recordings/Capitol50999 6 07880 23Grammy Recordings/Capitol(OCoLC)472481247Midwest Tapehttp://www.midwesttapes.comTEFMTTEFMTTEFNTGUtOrBLWpprccyNTGAM1630.18.T96 2010782.4216422CD PR GRA G80Grammy nominees 2010[sound recording].2010 Grammy nominees[Santa Monica, CA] :Grammy Recordings ;New York :Capitol,p2010.1 sound disc :digital ;4 3/4 in.Various performers.Compact disc.I gotta feeling(The Black Eyed Peas) --Poker face(Lady Gaga) --Use somebody(Kings of Leon) --You & me(Dave Matthews Band) --You belong with me(Taylor Swift) --Fallin' for you(Colbie Caillat) --You found me(The Fray) --Sober(P!nk) --My life would suck without you(Kelly Clarkson) --Hot n cold(Katy Perry) --Halo(Beyoncé) --Hometown glory(Adele) --Chicken fried(Zac Brown Band) --It happens(Sugarland) --I run to you(Lady Antebellum) --Here come goodbye(Rascal Flatts) --21 guns(Green Day) --Life in technicolor ii(Coldplay) --I'll go crazy if I don't go crazy tonight(U2) --Can't find my way home(Eric Clapton and Steve Winwood).Popular music2001-2010.Rock music2001-2010.Country music2001-2010.Grammy Awards.Lady Gaga.Swift, Taylor,1989-Caillat, Colbie.P!nk,1979-Clarkson, Kelly,1982-Perry, Katy.Beyoncé,1981-Adele,1988-Clapton, Eric.Winwood, Steve,1948-Black Eyed Peas (Musical group)Kings of Leon (Musical group)Dave Matthews Band.Fray (Musical group)Zac Brown Band.Sugarland (Musical group)Lady Antebellum (Musical group)Rascal Flatts (Musical group)Green Day (Musical group)Coldplay (Musical group)U2 (Musical group).b2359032409-20-1001-06-10ap(3)au(4)bebd(2)bo(2)bp(3)buca(4)cocr(2)dm(2)du(2)frfc(2)f3(4)fwfogr(3)iskm(4)ktkg(2)kl(2)lflh(2)mv(3)mi(2)mkncamu(2)nw(2)nb(4)ou(4)rern(2)rbsa(3)shsk(2)sw(2)snso(2)vv(2)vswc(2)wl(2)wm01-20-10mj-engcau0939786313375578 (BT)cmeC0NTG",null,null,"OCLC","o472481247"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"96","IMPORT-1377205600.55222","01397cjm a2200385Ia 4500ocn694633146OCoLC20110311153151.0sd fungnn|||eu101231s2011 xxkmunne n eng d88697446992688697 44699 2XL 
Recordings(OCoLC)694633146TEFMTTEFMTTEFBTCTAIMDGL9MR0UtOrBLWpprcNTGAM1630.18.A24T94 2011782.4216622CD PR ADE T99Adele,1988-21[sound recording].Twenty one[United Kingdom] :XL Recordings,p2011.1 sound disc (48 min.) :digital ;4 3/4 in.004808Adele, vocals ; with acc. musicians.Compact disc.Rolling in the deep(3:48) --Rumour has it(3:44) --Turning tables(4:10) --Don't you remember(4:03) --Set fire to the rain(4:02) --He won't go(4:38) --Take it all(3:49) --I'll be waiting(4:02) --One and only(5:49) --Lovesong(5:17) --Someone like you(4:46).Popular music2011-2020.Alternative rock music2011-2020.localSoul music2011-2020.localcme 201103119786314072902 (BT)j",null,null,"OCLC","o694633146"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"97","IMPORT-1377205600.55222","02800cgm a2200637Ia 4500ocm42581583OCoLC20130614123748.0vd bvaizq991006p19991982cau089 e vleng d0783232063978078323206502519205232320523Universal Home Video(OCoLC)42581583TOLTOLOCLCQOCLBTCTASTFZ5UYDXCPOCLCQOCLCGTEFOCLCQUMCNTGUtOrBLWengengfrespaengNTGADVD FIC DDead men don't wear plaid[videorecording] /a Universal picture ; an Aspen Film Society/William E. McEuen/David V. Picker production ; written by Carl Reiner, George Gipe, Steve Martin ; produced by David V. Picker and William E. McEuen ; directed by Carl Reiner.Universal City, CA :Universal Home Video,1999.1 videodisc (1 hr., 29 min.) :sd., b&w ;4 3/4 in.DVD, widescreen format; Dolby Digital mono.In English with optional French or Spanish subtitles; closed-captioned.Title and credits from container.Steve Martin, Rachel Ward, Reni Santoni, Carl Reiner.Director of photography, Michael Chapman; editor, Bud Molin; music, Miklos Rozsa.MPAA rating: PG.Originally released as a motion picture in 1982.Private eye Rigby Reardon investigates the death of a wealthy scientist with a little help from his \"friends.\" A new technique, recycling films of the '40s with a new story line.Special features include production notes, cast & filmmakers' bios, film highlights, theatrical trailer (2 min.) and web links.Private investigatorsDrama.ScientistsDeathDrama.MurderInvestigationDrama.Feature filmsUnited States.lcgftDetective and mystery films.lcgftComedy films.lcgftVideo recordings for the hearing impaired.lcgftVideo recordings with French language options.|2local.Video recordings with Spanish language options.|2local.Reiner, Carl,1922-Gipe, George.Martin, Steve,1945-Picker, David V.McEuen, William E.Ward, Rachel,1957-Santoni, Reni.Rózsa, Miklós,1907-1995.Universal City Studios.Universal Studios Home Video (Firm)Aspen Film Society.kh 20110512hConnexion ImportsKCLSC0NTG",null,null,"OCLC","o42581583"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"98","IMPORT-1377205600.55222","02222cgm a2200553Ia 4500ocm41443902 OCoLC20130614123748.0vd cvaizq990527p19991983cau090 e vleng d079073995X978079073995308539163752316375Warner Home Video(OCoLC)41443902KCPKCPOCLCQLEORCSBTCTAVP@OCLCQNTGUtOrBLWNTGADVD FIC MThe man with two brains[videorecording] /Warner Bros. presents ; a Carl Reiner film ; an Aspen Film Society, William E. McEuen/David V. Picker production ; written by Carl Reiner, Steve Martin, George Gipe ; produced by David V. Picker and William E. McEuen ; directed by Carl Reiner.Burbank, CA :Warner Home Video,c1999.1 videodisc (90 min.) :sd., col. 
;4 3/4 in.DVD, full screen presentation; Dolby Digital.Closed-captioned.Steve Martin, Kathleen Turner, David Warner.MPAA rating: R.Originally released as a motion picture in 1983.A wacky brain surgeon mourns for his wife and then falls for a beauty who hides a heart of stone. The situation is hopeless until his oddball research offers a ray of hope.For specific features see interactive menu.SurgeonsDrama.WifeDeathDrama.WidowersDrama.Feature filmsUnited States.lcgftComedy films.lcgftVideo recordings for the hearing impaired.lcgftMartin, Steve,1945-Turner, Kathleen,1954-Warner, David,1941-Reiner, Carl,1922-drtGipe, George.Picker, David V.McEuen, William E.Warner Bros. Pictures (1969- )Warner Home Video (Firm).b2319397909-15-1008-28-09duf3fonwrhvv09-24-09mhiengcau46186726khC0NTG",null,null,"OCLC","o41443902"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"99","IMPORT-1377205600.55222","02049cgm a2200505Ka 4500ocn809868852OCoLC20121017204545.0vd mvaizq120913s2012 cau390 e vleng d826663135367SF 13536Shout! FactorySF 13537Shout! FactorySF 13538Shout! FactorySF 13539Shout! Factory(OCoLC)809868852TEFMTTEFMTTEFNTGUtOrBLWNTGAPN1969.C65S74 2012792.7/623DVD 792.76 STESteve Martin[videorecording] :the television stuff /produced by Mary Sherwood.Television stuffLos Angeles, CA :Shout! Factory,c2012.3 videodiscs (390 min.) :sd., col., b&w ;4 3/4 in.DVD, Dolby Digital.Cataloged from container.Steve Martin, featuring special guest appearances by Dan Aykroyd, Laraine Newman, Lauren Hutton, John Belushi, Bill Murray, Lynn Redgrave, Paul Simon, David Letterman, Carl Reiner, Eric Idle, and Johnny Cash.The complete long-lost television specials of the one and only 'Wild and Crazy Guy' himself, with most material not seen for over 30 years. Includes several bonus clips, two music videos, SNL appearances and acceptance speeches.Special features: new interview with Steve Martin.The stand-up specials -- The NBC specials -- Bits and pieces.Martin, Steve,1945-Stand-up comedy.American wit and humor.Documentary television programs.lcgftTelevised performances.lcgftTelevised stand-up comedy routines.lcgftSherwood, Mary.Martin, Steve,1945-Shout! Factory (Firm)djn 201210179786314766429 (BT)hC0NTG",null,null,"OCLC","o809868852"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"100","IMPORT-1377205600.55222","01327cim a2200361Ia 4500ocn181169841OCoLC20130614123748.0sd fsngnn|||eu071114s2007 nyu n b eng d978074356972907435697250-7435-6972-5Audioworks(OCoLC)181169841DV1DV1BAKEROCLCQUtOrBLWZ26ACD B MARTINMartin, Steve,1945-Born standing up[sound recording] :a comic's life /by Steve Martin.New York :Simon & Schuster,p2007.4 sound discs (4 hr.) :digital ;4 3/4 in.\"Unabridged\"--Container.Compact discs.Read by the author.Martin reflects on his over 30 years in the comedy business, from writing to performing and everywhere in between. He offers listeners a once-in-a-lifetime opportunity to look into the mind of an artist at the top of his game.Martin, Steve,1945-EntertainersUnited StatesBiography.Audiobooks.lcgft.b2418043904-21-1004-03-10rn04-03-10mnnengnyu01220578C0Z26",null,null,"OCLC","o181169841"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"101","IMPORT-1377205600.55222","01782pam a22004214a 4500ocm71790035 OCoLC20071108125905.0060921s2006 caua e b 100 0 eng 20060313641597140570 (pbk. : alk. paper)9781597140577 (pbk. : alk. 
paper)(OCoLC)71790035DLCDLCYDXBAKERBTCTAYDXCPNTGUtOrBLWNTGATR820.5.I58 2006779.09222779.092 INTInternational Indigenous Photographers Conference(2006 :Davis, Calif.)Our people, our land, our images :international indigenous photographers /edited by Hulleah J. Tsinhnahjinnie and Veronica Passalacqua.Berkeley, Calif. :Heyday Books,c2006.xxiii, 71 p. :ill. (some col.) ;26 cm.Product of the International Indigenous Photographers Conference held in April 2006 at the University of California, Davis and the associated exhibit shown at the university's C.N. Gorman Museum..Includes bibliographical references.Documentary photographyExhibitions.Indigenous peoples in artExhibitions.Rites and ceremoniesExhibitions.Portrait photographyExhibitions.EthnologyExhibitions.Tsinhnahjinnie, Hulleah.Passalacqua, Veronica.C.N. Gorman Museum.Table of contents onlyhttp://www.loc.gov/catdir/toc/ecip071/2006031364.html.b2148217208-31-1011-08-07bebofwre12-19-07ma-engcau00khC0NTG",null,null,"OCLC","o71790035"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"102","IMPORT-1377205600.55222","01039cam a2200337Ia 4500ocn816033399OCoLC20130318071805.0121104s2012 is e 000 0 heb d97896551223059655122301(OCoLC)816033399WEINBWEINBHLSOCLCOCOOPULNTGUtOrBLWNTGABF575.L7P45 2012ON ORDER880-01Pelled, Esther.880-02Le-orah ha-tsaḥ shel ha-metsiʼut /Ester Peled.Title on t.p. verso:In the Clear Light of Reality880-03Tel Aviv :Bavel,2012.226 p. ;22 cm.Loneliness.Psychoanalysis.Love.IsraelTel Aviv.100-01/rפלד, אסתר.245-02/rלאורה הצח של המציאות /אסתר פלד.260-03/rתל אביב :בבל,2012.aC0NTG",null,null,"OCLC","o816033399"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"103","IMPORT-1377205600.55222","02084cgm a2200565Ia 4500ocm60523483 OCoLC20130522202021.0vd cvaizu050531t20051993xxu105 e vleng d14157092549781415709252841887050159705015PBS Home Video(OCoLC)60523483EOWEOWVP@BTCTASTFOCLCQIAZUtOrBLWengengn-us---NTGABV3785.G69C78 1993262.2092269/.2/09222DVD B GRAHAMCrusade[videorecording] :the life of Billy Graham /produced by Cutting Edge Productions and WTTW/Chicago ; producer/director, Julian Norridge.Widescreen.[United States] :PBS Home Video,[2005], c1993.1 videodisc (ca. 105 min.) :sd., col. ;4 3/4 in.DVD; widescreen; Dolby digital.Closed-captioned.Narrated by Peter Graves.Executive producer, Randall Balmer.Not rated.Billy Graham has preached to more people than any other person in history. 
This program takes the viewer behind the scenes to meet Billy Graham himself and witness the power of the Graham organization as it mobilizes followers far and wide.Special feature: Personal visit with Billy Graham.Graham, Billy,1918-EvangelistsUnited StatesBiography.Documentary films.lcgftBiographical films.lcgftReligious films.lcgftVideo recordings for the hearing impaired.lcgftNorridge, Julian.Graves, Peter,1926-2010.Cutting Edge Productions.WTTW (Television station : Chicago, Ill.)PBS Home Video.cme2013-05-229786309100924 (BT)hC0NTG",null,null,"OCLC","o60523483"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"104","IMPORT-1377205600.55222","02741cgm a2200613Ka 4500ocm70592076 OCoLC20130614123748.0vd cvaizs060718p20061988cau320 e vleng d14198343209781419834325794051260123E2601Warner Home Video(OCoLC)70592076Midwest Tapehttp://www.midwesttapes.comTEFMTengTEFMTRCSVP@OCLCQOCLCAOCLCQUtOrBLWe-fr---NTGAPN1992.77.A456 2006791.45/7522DVD FIC A SEASON 5 PT.2'Allo 'allo (Television program).Season 5.Selections.'Allo 'allo!The complete series five part deux[videorecording] /produced and directed by David Croft ; written by Jeremy Lloyd & David Croft.Allo 'allo!The complete series five part twoAllo 'allo!The complete series 5 part 2Burbank, CA :Warner Home Video,[2006]2 videodiscs (ca. 320 min.) :sd., col. ;4 3/4 in.DVD, region 1, full screen (4:3) presentation.Closed-captioned.Title from container.Gorden Kaye, Richard Marner, Carmen Silvera, Kim Hartman, Vicki Michelle.Originally broadcast on the BBC, 1988-89.MPAA rating: Not rated.René Artois is a man on the run. Acting as a pawn for the French Resistance, he spends his days running from the Gestapo, the Generals, and naturally, from all of the women who desperately yearn for his affection.The reluctant millionaires -- A duck for launch -- The exploding bedpan -- Going like a bomb -- Money to burn -- Puddings can go off -- Landmines for London -- Flight to Geneva -- Train of events -- An enigma variation -- Wedding bloss -- Down the drain -- All in disgeese.World War, 1939-1945FranceDrama.World War, 1939-1945Underground movementsFranceDrama.Man-woman relationshipsDrama.Television programsGreat Britain.lcgftComedy television programs.gsafdVideo recordings for the hearing impaired.lcgftComedyTelevision series.migfgCroft, David,1922-2011.Lloyd, Jeremy.Kaye, Gorden,1941-Marner, Richard,1921-2004.Silvera, Carmen,1922-2002.BBC Video (Firm)Warner Home Video (Firm)jab2013-05-31hC0NTG",null,null,"OCLC","o70592076"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"105","IMPORT-1377205600.55222","02871njm a2200577Ia 4500ocn841264227OCoLC20130603174833.0g--- vlsd fungnn|m|edvd cvaizu130430s2005 ch ppnne d n chi d4710841050251PKXR0002TNT(OCoLC)841264227TEFTEFUtOrBLWchichia-ch---NTGAM1807.18.D46D36 2005782.4216/3095124/923CD PFE DEN D02 EAST ASIA880-01Deng, Lijun,1953-1995.880-02Dan dan you qing + shen qing jing xuan[sound recording] /Deng Lijun [Teresa Teng].880-03Deng Lijun dan dan you qing + shen qing jing xuan880-04Taibei Shi :Deng Lijun wen hua shi ye you xian gong si fa xing,2005.1 sound disc :digital ;4 3/4 in. 
+1 videodisc (4 3/4 in.)Sung in Chinese (Mandarin).Title from disc label.880-05Deng Lijun, vocals ; with various instrumental acc.880-06Zhi zuo, TNT Production Ltd.Compact disc.Lyrics inserted in container.880-07CD.Du shang xi lou --Dan yuan ren chang jiu --Ji duo chou --Fang cao wu qing --Qing ye you you --You shui zhi wo ci shi qing --Yan zhi lei --Wan ye qian sheng --Ren yue huang hun hou --Xiang kan lei yan --Yu shuo hai xiu --Si jun. xin shou lu dan qu:Tian shang tian wai tian wu ya --Bu zhao hen ji --Xiang gu shi ban wen rou --Ming yun zhi chuan --Wu ye wei feng.880-08DVD.Du shang xi lou --Si jun.Popular musicTaiwan1981-1990.Popular musicTaiwan1991-2000.Songs, ChineseTaiwan.Chinese language music recordingsMandarin.local100-01鄧麗君,1953-1995.245-02淡淡幽情+深情精選[sound recording] /鄧麗君 [Teresa Teng].246-03鄧麗君淡淡幽情 + 深情精選260-04台北市 :鄧麗君文化事業有限公司發行,2005.511-05鄧麗君, vocals ; with various instrumental acc.508-06製作, TNT Production Ltd.505-07CD.獨上西樓 --但願人長久 --幾多愁 --芳草無情 --清夜悠悠 --有誰知我此時情 --胭脂淚 --萬葉千聲 --人約黃昏後 --相看淚眼 --欲說還休 --思君. 新收錄單曲:天上天外天無涯 --不著痕跡 --像故事般溫柔 --命運之川 --午夜微風.505-08DVD.獨上西樓 --思君.cme2013-06-03jC0NTG",null,null,"OCLC","o841264227"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"106","IMPORT-1377205600.55222","01502nam a2200397Ia 4500ocn828672220OCoLC20130522140351.0130228s2012 le a e 000 0 ara d97899532644489953264449(OCoLC)828672220MCCPPMCCPPNTGUtOrBLWarafreNTGA158.123158.1 KUZ ARABIC880-01Kuzmā, Hayyām.Savoir dire non aux autres et oui à soi-même: principes premiers de toute vie épanouie.Arabic880-02Kayfa aqūl la lil-akharīn wa-naʻam la-dhātī :al-Sabīl ila ḥaȳah haniʼat /Hayyām Kuzmā.880-03Bayrūt :Hāshīt Anṭuwān,2012.156 p. :ill. ;21 cm.In Arabic.How to say no to others and yes to yourself. A way to live a happy life.Self-acceptance.Self-confidence.Assertiveness (Psychology)Self-esteem.Arabic language editionNonfiction.local100-01/rكوزما, هيام.245-02/rكيف أقول لا للآخرين و نعم لذاتي :السبيل إلى حياة هنيئة /هيام كوزما.260-03/rبىروت :حاشىت أنطوان,2012.jab2013-05-22aC0NTG",null,null,"OCLC","o828672220"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"107","IMPORT-1377205600.55222","01644cam a2200409Ia 4500ocn719365709OCoLC20130517155904.0110502s2002 ru a e d 000 0 rusod9785170671557 (AST)5170671555 (AST)9785271278747 (Astrelʹ)5271278743 (Astrelʹ)(OCoLC)719365709CLECLECLEORXNTGUtOrBLWrusengNTGACB156.S573717 2009001.94 SIT RUSSIANSitchin, Zecharia.Lost book of Enki.Russian880-01Poteri︠a︡nnai︠a︡ kniga Ėnki :vospominanii︠a︡ i prorochestva Nazemnogo Boga /Zakharii︠a︡ Sitchin ; [perevod s angliĭskogo O. V. Gorshunovoĭ].880-02Moskva :AST :Astrelʹ,c2009.350 p. :ill. ;21 cm.In Russian.Translation of: Lost book of Enki.Includes glossary.Civilization, AncientExtraterrestrial influences.Lost booksHistory.Russian language editionNonfiction.local245-01Потерянная книга Энки :воспоминания и пророчества Наземного Бога /Захария Ситчин ; [перевод с английского О. В. Горшуновой].260-02Москва :АСТ :Астрель,c2009.700-00Горшуновой, О. В.st2013-05-17aC0NTG",null,null,"OCLC","o719365709"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"108","IMPORT-1377205600.55222","01193nas a2200385Ka 4500ocn843202227OCoLC20130614123749.0130516c20139999waubr p 0 a0pan d(OCoLC)843202227NTGNTGUtOrBLWpanengNTGAPER INDIndia-USA.India-USA.India-USA connecting communitiesKent, WA :India-USA Magazine,2013-v. :ill. 
;26 cm.BimonthlyIn Punjabi and English.Description based on: April-May 2013.HealthPeriodicals.MedicinePeriodicals.Self-care, HealthPeriodicals.Panjabi languagePeriodicals.Panjabi-English language editionNonfiction.localEnglish-Panjabi language editionNonfiction.localAasra Punjabi English magazine.(OCoLC)70160664$6.00 // 6(CR) 5-13 NO PUB INFO FOUND 5-13 2013 APR/MAY 5-23-13 LB//st2013-05-16bC0NTG",null,null,"OCLC","o843202227"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"109","IMPORT-1377205600.55222","01677nam a2200421Ma 4500ocn781674585OCoLC20130530150937.0120326s2012 ja e 000 0 jpn d97847612682204761268220(OCoLC)781674585TRCLSTRCLSNTGUtOrBLWjpnengNTGA158.1 SIE JAPANESE880-01Siebold, Steve.177 mental toughness secrets of the world class.Japanese880-02Ichiryū no hito ni manabu jibun no migakikata :zenbei kusshi no chōninki seminā kōshi ga denju suru jūni no seichō hōsoku /Siebold Steve;Yumiba Takashi.English title on cover :177 mental toughness secrets of the world class880-03Tōkyō :Kankishuppan,2012.237 p. ;20 cm.In Japanese.Translation of: 177 mental toughness secrets of the world class. SuccessPsychological aspects.Success.Mental efficiency.Psychology, Applied.Japanese language editionNonfiction.local880-04Yumiba, Takashi.100-01スティーブ・シーボルド.245-02一流の人に学ぶ自分の磨き方 :全米屈指の超人気セミナー講師が伝授する12の成長法則 /スティーブ・シーボルド著;弓場隆訳.260-03東京 :かんき出版,2012.700-04弓場隆.st2013-05-30aC0NTG",null,null,"OCLC","o781674585"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"110","IMPORT-1377205600.55222","01555cam a2200469 a 4500ocn841177098OCoLC20130524002337.0121205s2012 ch a e b 000 0 chi d 201248837397895705271249570527129(OCoLC)841177098TEFengTEFDLCNTGUtOrBLWlccopycata-cc---NTGABF1714.C5S457 2012133.5/925123133.59251 SHE CHINESE880-01Sheng xiao dong wu zhi tan /Zhang Zhijie zhu bian.880-02Chu ban.880-03Taibei Shi :Taiwan shang wu yin shu guan gu fen you xian gong si,2012.viii, 209 p. :ill. ;22 cm.880-04Shang wu ke pu guan ;12Includes bibliographical references.Astrology, Chinese.AnimalsChinaFolklore.Animals and civilizationChina.Chiese language editionNonfiction.local880-05Zhang, Zhijie.880-06Shang wu ke pu guan ;12.245-01生肖動物摭談 /張之傑主編.250-02初版.260-03臺北市 :臺灣商務印書館股份有限公司,2012.490-04商務科普館 ;12700-05張之傑.830-06商務科普館 ;12.csr2013-05-23aC0NTG",null,null,"OCLC","o841177098"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"111","IMPORT-1377205600.55222","01513cam a2200397Ia 4500ocn829392997OCoLC20130605182624.0130308s2012 ko a e 000 0 kor d97889605119728960511978(OCoLC)829392997MCCPPengMCCPPMCCPPUtOrBLWNTGA306.8723306.87 CHO KOREAN880-01Choe, Kwang-hyŏn.880-02Kajok ŭi tu ŏlgul =Two faces of my family : sarang hajiman sangchŏdo chugobannŭn nawa kawa kajok ŭi simriterapi /Choe Kwang-hyŏn chiŭm.Two faces of my family880-03Sarang hajiman sangch'ŏdo chugobannŭn nawa kawa kajok ŭi simriterapi880-04Chopan.880-05Sŏul :Puk'i,2012.261 p. :ill. 
;21 cm.In Korean.FamiliesFamily relationships.Family crises.Korean language editionNonfiction.local100-01최광현.245-02가족의 두 얼굴 =Two faces of my family : 사랑하지만 상처도 주고받는 나와 가족의 심리테라피 /최광현 지음.246-03사랑하지만 상처도 주고받는 나와 가족의 심리테라피250-04초판.260-05서울 :부키,2012.jab2013-05-22aC0NTG",null,null,"OCLC","o829392997"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"112","IMPORT-1377205600.55222","02380cam a2200385Ia 4500ocm37920876 OCoLC20130614123749.0971111s1997 cau e 000 0 eng d 97070806 0898706408 (pbk.)()1819412BJOBJOMNJNTGOrPssOrLoB-BWaOLNUtOrBLWenggerNTGABX1751.2.R3413 1997282.092 RATRatzinger, Joseph.Salz der Erde.EnglishSalt of the earth :Christianity and the Catholic Church at the end of the millennium /Joseph Ratzinger ; an interview with Peter Seewald ; translated by Adrian Walker.Subtitle on cover:Church at the end of the millenniumSan Francisco :Ignatius Press,c1997.283 p. ;21 cm.Translation of Salz der Erde.The Catholic Faith; Words and Signs --Pt. 1.Personal Biography.Background and Vocation.The Young Professor.Bishop and Cardinal.The Prefect and His Pope --Pt. 2.Problems of the Catholic Church.Rome under Fire.On the State of the Church.The Situation in Germany.Causes of the Decline.The Mistakes of the Church.The Canon of Criticism --Pt. 3.On the Threshold of a New Era.Two Thousand Years of Salvation History - and Still No Redemption?Catharsis - A New Millennium - A Time of Testing.A \"New Springtime of the Human Spirit\" for the Third Millennium.Priorities of the Church's Development.Future of the Church - Church of the Future.The True History of the World.The highest ranking man in the Vatican next to the Pope gives an exclusive interview on the worldwide state of Catholicism to a highly regarded secular German journalist. \"Salt of the Earth\" explores a host of controversial and difficult issues facing Catholicism and Christianity at the end of the millennium.Ratzinger, JosephInterviews.Catholic ChurchDoctrinesHistory20th century.Catholic ChurchHistory20th century.Christianity20th century.Seewald, Peter..b1096058201-20-1009-01-04au(2)bebufrfwmvresh09-01-04ma-engcau027",null,null,"OCLC","o37920876"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"113","IMPORT-1377205600.55222","01157cam a22003738a 4500ocn743214099OCoLC20130614123749.0110720s2012 nyuac e b 001 0 eng 20110294379780816081592 (alk. paper)081608159X (alk. paper)(OCoLC)743214099DLCengDLCBTCTABDXYDXCPBWXOCLCOXXWELNTGUtOrBLWpccNTGABD331.B5925 201211023110 BOEBoersema, David.The Facts on File guide to philosophy.Reality, religion, and the mind /David Boersema, Kari Middleton.Reality, religion, and the mindNew York :Facts On File,c2012.x, 300 p. :ill., ports. ;24 cm.Includes bibliographical references (p. 278) and index.Reality.Metaphysics.ReligionPhilosophy.Philosophy of mind.Middleton, Kari.AMW2013-01-16aC0NTG",null,null,"OCLC","o743214099"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"114","IMPORT-1377205600.55222","01112cam a2200361 a 4500ocn743214104OCoLC20130116191141.0110720s2012 nyua e b 001 0 eng 20110294929780816084821 (alk. paper)0816084823 (alk. paper)(OCoLC)743214104DLCengDLCYDXBTCTABDXYDXCPBWXOCLCONTGUtOrBLWpccNTGABD161.B54 201212123121 BOEBoersema, David.The Facts on File guide to philosophy.Knowledge, logic, and science /David Boersema, Kari Middleton.Knowledge, logic, and scienceNew York :Facts on File,c2012.xi, 294 p. :ill. ;24 cm.Includes bibliographical references (p. 
270) and index.Knowledge, Theory of.Logic.SciencePhilosophy.Middleton, Kari.AMW2013-01-16aC0NTG",null,null,"OCLC","o743214104"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"115","IMPORT-1377205600.55222","01022cam a2200337Ia 4500ocn761724599OCoLC20130614123749.0111121s2011 sp a e 000 0 spa d9788499170978 (pbk.)8499170978 (pbk.)(OCoLC)761724599JQWJQWJQWYDXCPBTCTABDXQBXUtOrBLWNTGABF1779.F4S56 2011133.3/33723133.3337 SHO SPANISHShoki, Futabei.Mejora tu vida con el feng shui /Futabei Shoki.Barcelona [Spain] :Grupo Robin Book,c2011.185 p. :ill. ;14 x 14 cm.Vital ;15In Spanish.\"Aprende a armonizar la energía que te rodea.\"--Cover.Feng shui.Spanish language editionNonfiction.localColección Vital ;15.lmc2013-01-16aC0NTG",null,null,"OCLC","o761724599"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"116","IMPORT-1377205600.55222","02085cam a2200409Ia 4500ocn820149011OCoLC20130116205611.0121126s2013 nyuaf e b 001 0 eng d80003116097814391912551439191255(OCoLC)820149011(OCoLC)800031160TOHTOHJAGBTCTAJTHYDXCPACNCGPIFKNTGUtOrBLWNTGABF311.H646 2013158.123153.723153.7 HORHorowitz, Alexandra.On looking :eleven walks with expert eyes /Alexandra Horowitz.1st Scribner hardcover ed.New York :Scribner,2013.vii, 308 p., [4] p. of plates :ill. (some col.) ;24 cm.Includes bibliographical references (p. 267-294) and index.On Looking begins with inattention. It is about attending to the joys of the unattended, the perceived 'ordinary.' Horowitz encourages us to rediscover the extraordinary things that we are missing in our ordinary activities. Even when engaged in the simplest of activities like taking a walk around the block, we pay so little attention to most of what is right before us that we are sleepwalkers in our own lives.Amateur eyes --Inanimate city : the material of the landscape. Muchness ;Minerals and biomass ;Minding our Qs ;Into the fourth dimension --Animate city : everything that won't stand still. Flipping things over ;The animals among us ;A nice place (to walk) ;The suggestiveness of thumb-nails --Sensory city : things that hum, smell, or vibrate. Seeing/not seeing ;The sound of parallel parking ;A dog's-nose view ;Seeing it.Self-consciousness (Awareness)Self-actualization (Psychology)Perception.Cognition.Awareness.Environmental psychology.edl2013-01-16aC0NTG",null,null,"OCLC","o820149011"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"117","IMPORT-1377205600.55222","01180cam a22003493 4500ocn774493028OCoLC20130614123749.0120128s2012 sp e 000 0 spa d9788497778121 (pbk.)849777812X (pbk.)(OCoLC)774493028BTCTAengBTCTAYDXCPBDXH7HUUCNTGUtOrBLWspaengNTGABL624.M86314 2012154.222154.2 MUR SPANISHMurphy, Joseph.Maximize your potential through the power of your subconscious mind for health and vitality.SpanishMaximice su potencial mediante el poder de su mente subconsciente para generar salud y vitalidad /Joseph Murphy ; [traducción, David M. George].Salud y vitalidad1. ed.Barcelona :Ediciones Obelisco,2012.194 p. 
;21 cm.New Thought.HealthPsychological aspects.Spanish language editionNonfiction.localcsr2013-01-16aC0NTG",null,null,"OCLC","o774493028"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"118","IMPORT-1377205600.55222","01468cam a22003618a 4500ocn771057345OCoLC20130116163137.0120104s2012 nyua e 000 0 eng 2011052228016086892Uk97814555165991455516597(OCoLC)771057345DLCengDLCBTCTABDXYDXCPZS3UKMGBNTGUtOrBLWpccNTGABF697.5.S43B53 2012158.123158.1 BLA Black, Christa.God loves ugly :& love makes beautiful /Christa Black.1st ed.New York, NY :FaithWords,2012.xii, 205 p. :ill. ;22 cm.The love bucket :(I'm still here) --Sticks and stones :(God loves ugly) --Puddle jumping :(Black Monday) --There's always a reason : (the grass is always greener) --Jesus/Steve : (feel so good) --Monsters : (drift away) --Forty days and a horse :(California sunshine) --Lessons in love : (lonely) --The destination : (the view).Examines the author's battle with self-loathing and depression, and how her belief in God's unconditional love allowed her to love herself as well.Self-perception.Self-perceptionReligious aspects.AMW2013-01-16aC0NTG",null,null,"OCLC","o771057345"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"119","IMPORT-1377205600.55222","00865nam a2200313Ia 4500ocn813166496OCoLC20130614123749.0121016s2012 nyu e 001 0 eng d97816144817821614481784(OCoLC)813166496JRGJRGNTGUtOrBLWNTGA158.1 KOBKobrin, Neil.Emotional well-being :embracing the gift of life /Neil Kobrin.New York :Morgan James Publishing,2012.246 p. ;23 cm.Attention.Stress management.Awareness.Self-actualization (Psychology)Mindfulness-based cognitive therapy.Well-being.JL2013-01-16aC0NTG",null,null,"OCLC","o813166496"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"120","IMPORT-1377205600.55222","01984cam a2200313Ka 4500ocn794709257OCoLC20130614123749.0120602s2013 nyu e 000 0 eng d9781614483212 (pbk.)1614483213 (pbk.)9781614483229 (ebook)(OCoLC)794709257BTCTAengBTCTAYDXCPNTGUtOrBLWNTGA158.123158.1 MALMalouf, Gregory Nicholas.Silent :the power of silence /Gregory Nicholas Malouf.New York :Morgan James Pub.,2013.xxiii, 265 p. ;23 cm.\"All too many of us create an outer persona or ego self in order to cope with the fear and anxiety we feel on a daily basis. Our ego self normalizes these intense emotions and stops us from feeling anything at all as we drive ourselves towards our goals of financial status and success. In his book, Silent, Gregory Nicholas Malouf asks that we start to recognize what we feel and in doing so, face our truth. Once we are able to acknowledge, accept and forgive ourselves for being afraid, we are able to stop running away, live in the present and so create the life we most desire. In the spirit of Eckhart Tolle and Marianne Williamson, Malouf, founder of Epsilon Healing Academy, frankly reveals his journey from an abusive childhood that will shock you and move you, to his empty existence as a wealthy and highly successful entrepreneur living 'the dream'. 
A real account of life at its worst and the lessons Malouf learnt to transform it to life at its best, to reconnect with your true, inner self and find the power of the silence within to lead the life you most desire\"--Amazon.com.Malouf, Gregory Nicholas.Self-actualization (Psychology)Identity (Psychology)slh2013-01-16aC0NTG",null,null,"OCLC","o794709257"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"121","IMPORT-1377205600.55222","01300cam a2200349Ia 4500ocn761313353OCoLC20130116224609.0111117s2008 sp a e 000 0 spa d7096826739788496707542 (pbk.)8496707547 (pbk.)(OCoLC)761313353(OCoLC)709682673WIVengWIVQBXBTCTAYDXCPBDXOCLCQOCLCOKNJUtOrBLWspaitaNTGAHM1106.F35 2008158.2 FAL SPANISH 2008Falworth, Omar.El arte de-- vivir bien con los demas :sencillos pero eficaces consejos para los que quieren conocer a los demás, estar bien con todos y tener verdaderos amigos /Omar Falworth ; [traducción de la edición italiana: Julieta Carmona Lombardo].Vivir bien con los demas3a ed.Barcelona :Terapias Verdes,2008.1 v. (unpaged) :ill. ;22 cm.Edición original: L'arte di-- vivere bene con gli altri.Interpersonal relations.Spanish language editionNonfiction.localCarmona Lombardo, Julieta.lmc2013-01-16aC0NTG",null,null,"OCLC","o761313353"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"122","IMPORT-1377205600.55222","01336cam a2200349Ma 4500ocn794592518OCoLC20130116174415.0120531s2012 mx a e b 000 0 spa d60711078819786071107886(OCoLC)794592518YDXCPYDXCPBTCTABDXTXBXLOCLCOMLYJQMSJPUtOrBLWNTGA305.4223158 VAR SPANISHVargas de González Carbonell, Gabriela.Soy mujer, soy invencible, y estoy exhausta! /Gaby Vargas.México :Punto De Lectura,2012.411 p. :ill. ;19 cm.Includes bibliographical references.Provides advice for women, based on the author's experiences and knowledge of Mexican women, about balancing relationships, work, family life, health, and personal development.Contenido: Yo con mi pareja --Yo y mi trabajo --Yo con mi familia --Yo y mi salud - Yo conmigo mismaWomenPsychology.WomenSocial conditions.Interpersonal relations.Social sciencesSociology.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG",null,null,"OCLC","o794592518"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"123","IMPORT-1377205600.55222","01096cam a2200361 a 4500ocn743214105OCoLC20130116201107.0110720s2012 nyua e b 001 0 eng 20110294939780816084838 (alk. paper)0816084831 (alk. paper)(OCoLC)743214105DLCengDLCYDXBTCTABDXYDXCPBWXNTGUtOrBLWpccNTGABJ1012.B56 201217023170 BOEBoersema, David.The Facts on File guide to philosophy.Values and the good life /David Boersema, Kari Middleton.Values and the good lifeNew York :Facts On File,c2012.x, 308 p. :ill. ;24 cm.Includes bibliographical references (p. 280) and index.Ethics.Political sciencePhilosophy.Aesthetics.Middleton, Kari.AMW2013-01-16aC0NTG",null,null,"OCLC","o743214105"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"124","IMPORT-1377205600.55222","01587cam a2200361 a 4500ocn741751372OCoLC20130116184237.0110714s2012 nyua e b 001 0 eng 2011029436 201101686497808160815850816081581(OCoLC)741751372DLCengDLCYDXBDXYDXCPBWXOCLCOWSONTGUtOrBLWpccNTGAB72.B66 201219023190 BOEBoersema, David.The Facts on File guide to philosophy.History of Western philosophy /David Boersema, Kari Middleton.History of Western philosophyNew York :Facts on File,c2012.xi, 426 p. :ill. ;24 cm.Includes bibliographical references (p. 
400) and index.Introductory discussion questions --The Greek miracle --Elements and the nature of change --Heraclitus and change --Changelessness and mathematics --Greek Atomism --Sophists and Socrates --Plato --Aristotle --Hellenistic philosophy --Medieval and Renaissance philosophy --Concluding discussion questions.Offers a historical introduction to Western philosophy, describing notable philosophers and movements such as Aristotle, Socrates, existentialism, analytic philosophy, and logical positivism.PhilosophyHistory.Middleton, Kari.AMW2013-01-16aC0NTG",null,null,"OCLC","o741751372"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"125","IMPORT-1377205600.55222","01361cam a22003258a 4500ocn779266328OCoLC20130116221114.0120319s2012 nyu e 000 0 eng 201200866297814516090281451609027(OCoLC)779266328DLCengDLCBTCTABDXOCLCOYDXCPBWXNTGUtOrBLWpccNTGABT752.R565 201224323243 ROLRollins, Peter.The idolatry of God :breaking our addiction to certainty and satisfaction /Peter Rollins.New York :Howard Books,2012.viii, 208 p. ;21 cm.The church shouldn't do worship, the charts have it covered --On not getting what you want, and liking it --Hiding behind the mask that we are --Be part of the problem, not the solution --Trash of the world --The fool says in his heart, \"There is knowing god\" --I need your eyes in order to see myself --Destroying Christianity and other Christian acts --Want to lose belief?: join the church --Conclusion: a faith full of signs and wonders.SalvationChristianity.Christian life.JL2013-01-16aC0NTG",null,null,"OCLC","o779266328"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"126","IMPORT-1377205600.55222","02107cam a22003858a 4500ocn788298783OCoLC20130116212531.0120614s2012 inu e b 000 0aeng 20120221789781594712920 (pbk.)1594712921 (pbk.)9781594713507 (e-book)1594713502 (e-book)(OCoLC)788298783DLCengDLCBTCTAOCLCOBDXORXNTGUtOrBLWpccNTGABX4705.H3337A3 2012248.2/42092B23248.242092 HAWHaw, Chris,1981-From Willow Creek to Sacred Heart :rekindling my love for Catholicism /Chris Haw.Notre Dame, Ind. :Ave Maria Press,c2012.xix, 234 p. ;22 cm.Includes bibliographical references (p. 211-234).Action(with some contemplation) --From mass to megachurch --From class to streets --From streets to jungle --From jungle to war --From war to concrete jungle --Contemplation(with some action) --Murder and the mass --Pagan Christianity --Search for no accent: or, the impossibility of nondenominationalism --On being part of a terrible organization: or, how to treat the church like a (dysfunctional) family --Art and apocalypse.All Christians believe the same thing, right? Haw, co-author of Jesus for President, would whole-heartedly disagree. But he expresses that disagreement with a refreshing dose of respect, objectivity, and love. The author is an expert at describing the beauty and edifying character of both evangelical Christianity and Roman Catholicism, as well as the numerous challenges they face. 
After a number of years immersed in Protestantism, he ultimately chooses to return to the Catholic Church into which he was born.Haw, Chris,1981-ConversionCatholic Church.Christian biography.edl2013-01-16aC0NTG",null,null,"OCLC","o788298783"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"127","IMPORT-1377205600.55222","01215cam a2200373Ia 4500ocn819324712OCoLC20130116230236.0121126s2011 flu e b 000 0 spa d97807899170410789917041(OCoLC)819324712HT#HT#BDXUtOrBLWspaengNTGABV4627.A5 A548 2011248.4 AND SPANISHAnderson, Neil T.,1942-Getting anger under control.SpanishControla tu ira /Neil T. Anderson y Rich Miller.1a ed.Miami, FL :Editorial Unilit,c2011.248 p. ;21 cm.Text in Spanish.\"Vence: el resentimiento sin resolver, las emociones abrumadoras, las mentiras detrás de la ira.\"Translation of: Getting anger under control.Includes bibliographical references (p. 243-248).AngerReligious aspectsChristianity.Spanish language editionNonfiction.localMiller, Rich,1954-lmc2013-01-16aC0NTG",null,null,"OCLC","o819324712"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"128","IMPORT-1377205600.55222","01410cam a2200361 a 4500ocn783138158OCoLC20130116221955.0120418s2012 miua e b 000 0 eng 20120132829780310494843 (hardcover)0310494842 (hardcover)(OCoLC)783138158DLCengDLCYDXBTCTABDXYDXCPBWXNTGUtOrBLWpccNTGABV4647.H67S73 201225323253 STAStanley, Andy.Deep & wide :creating churches unchurched people love to attend /Andy Stanley.Deep and wideGrand Rapids, Mich. :Zondervan,c2012.350 p. :ill. ;23 cm.Includes bibliographical references (p. 347-350).Author and pastor Andy Stanley draws from Scripture and over 25 years of pastoral experience to communicate to church leaders how they can create a church where believers can have a growing faith in Jesus and at the same time unbelievers can make a vital and lasting connection---a ministry that is both deep and wide.HospitalityReligious aspectsChristianity.Non-church-affiliated people.Church marketing.AMW2013-01-16aC0NTG",null,null,"OCLC","o783138158"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"129","IMPORT-1377205600.55222","01021nam a2200337Ia 4500ocn810436331OCoLC20130116234631.0120921s2011 ve c e 000 0 spa d97898036908239803690825(OCoLC)810436331HT#HT#NTGUtOrBLWNTGA299.93 MEN SPANISH 2011Méndez, Conny.Piensa lo bueno y se te dará /Conny Méndez.2d ed.Caracas, Venezuela :Ediciones Giluz :Bienes Lacónica :Distribuidora Gilavil,2011.111 p. :ill. ;20 cm.Colección metafísicaNew Age movementReligion.Occultism.Metaphysics.Spiritual lifeNew Age movement.Spanish language editionNonfiction.localColección Metafísica.csr2013-01-16aC0NTG",null,null,"OCLC","o810436331"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"130","IMPORT-1377205600.55222","01635cam a2200373 a 4500ocn779266207OCoLC20130116181755.0120316s2013 nyu e 000 0aeng 20120107319781439190586 (hbk.)1439190585 (hbk.)9781439190616 (ebk.)1439190615 (ebk.)(OCoLC)779266207DLCengDLCIG#BTCTABDXYDXCPGK8ILCNTGUtOrBLWNTGACT275.S5918A3 2013302.34092B23302.34092 SONSonnenberg, Susanna,1965-She matters :a life in friendships /Susanna Sonnenberg.1st Scribner hardcover ed.New York :Scribner,2013.vii, 255 p. ;24 cm.Illuminates the friendships that have influenced, nourished, inspired, and haunted the author--and sometimes torn her apart--each of which has its own lessons that she seeks to understand.She matters --Young. Women are like this ;Real friends ;Facebook ;Proctor duties ;The root cellar --Aware. 
Roommate ;Homesick ;Annabelle upstairs ;Blind date ;Evidence ;Within reach ;Kindling --Awake. We turn into mothers ;Orphan girl ;Naked ;Boundaries ;Ritual ;Real estate ;The four seasons ;As we both know.Sonnenberg, Susanna,1965-Sonnenberg, Susanna,1965-Friends and associates.Female friendship.edl2013-01-16aC0NTG",null,null,"OCLC","o779266207"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"131","IMPORT-1377205600.55222","01814cam a2200397Ia 4500ocn747529314OCoLC20130116225139.0110818s2011 mx e 000 0 spa d97860748017816074801789(OCoLC)747529314BTCTAengBTCTAYDXCPEXRGPIH7HUtOrBLWspaengNTGAF1221.Y3C373718 2011306.092 WOL SPANISHWolf, Felix.Art of navigation.SpanishLas enseñanzas de Carlos Castaneda :aprendizaje y espiritualidad /Felix Wolf ; [Verónica Gerber Bicecci por la [traducción].1a. ed. en México.México, D.F. :Vergara,c2011.335 p. ;23 cm.Text in Spanish.Titulo original: The art of navigation : travels with Carlos Castaneda and beyond. Originally published: San Francisco : Council Oak Books, 2010.\"Millenium\"--Added t.p. and p. [4] of cover.Con fidelidad, respeto y humildad por la vida, Las enseñanzas de Carlos Castaneda us un libro que comparte una visión distinta al actual sentimiento de hartazgo y desesperanza que prepondera en nuestra sociedad, brindándonos la oportunidad de maravillarnos por el simple hecho de ser día a día seres de luz capaces de fortalecer nuestro cuerpo y espíritu con la bendición de la tierra, y lo que hay en ella.Castaneda, Carlos,1931-1998Criticism and interpretation.Yaqui IndiansReligion.Shamanism.Nagualism.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG",null,null,"OCLC","o747529314"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"132","IMPORT-1377205600.55222","01103cam a2200337Ia 4500ocn767569919OCoLC20130116210527.0111203s2011 sp a e b 000 0 spa d9788441530218 (pbk.)8441530211 (pbk.)(OCoLC)767569919BTCTAengBTCTAYDXCPBDXC3LKNJNTGUtOrBLWNTGA302.23123323.445 BER SPANISHBergareche Sainz de los Terreros, Borja,1977-Wikileaks confidencial /Borja Bergareche Sainz de los Terreros ; prólogo de Rosental Calmon Alves.Madrid, Spain :Anaya Multimedia,2011.213 p. :ill. ;23 cm.800 booksIncludes bibliographical references (p. 207-213)WikiLeaks (Organization)Assange, Julian.News Web sites.Freedom of information.Spanish language editionNonfiction.localcsr2013-01-16aC0NTG",null,null,"OCLC","o767569919"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"133","IMPORT-1377205600.55222","01634cam a2200349Ia 4500ocn785079137OCoLC20130116202802.0120407r20122011nyuaf e b 001 0 eng d97816059839811605983985(OCoLC)785079137BTCTAengBTCTABDXYDXCPJAGOCLCOB@LPATCO2BURNTGUtOrBLWNTGAUB251.G7C67 2012327.124100923327.1241 CORCorera, Gordon.The art of betrayal :the secret history of MI6 /Gordon Corera.New York :Pegasus Books,c2012.481 p., [16] p. of plates :ill. ;24 cm.Originally published: London: Weidenfeld & Nicolson, 2011.Includes bibliographical references (p. 414-461) and index.From Berlin to the Congo, from Moscow to the back streets of London, these are the stories of the agents on the front lines of British intelligence. 
And the truth is often more remarkable than fiction.Into the shadows : life and death in Vienna --The cost of betrayal --A river full of crocodiles : murder in the Congo --Moscow rules --The wilderness of mirrors --Compromising situations --Escape from Moscow --The Afghan plains --Out of the shadows --In the bunker.Great Britain.MI6History20th century.Intelligence serviceGreat BritainHistory20th century.SpiesGreat Britain.edl2013-01-16aC0NTG",null,null,"OCLC","o785079137"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"134","IMPORT-1377205600.55222","02607cam a2200373 a 4500ocn772099444OCoLC20130116185343.0120509s2013 mau e b 001 0 eng 20120173239780547317274054731727140021729534(OCoLC)772099444DLCengDLCBTCTABDXYDXCPBWXMOFBURABGVP@YUSNTGUtOrBLWpccn-us---n-us-nyNTGAHG4910.W357 2013332.63/2097323332.632097 WEAWeatherall, James Owen.The physics of Wall Street :a brief history of predicting the unpredictable /James Owen Weatherall.Boston :Houghton Mifflin Harcourt,2013.xviii, 286 p. ;24 cm.Includes bibliographical references (p. [250]-268) and index.While many of the mathematicians and software engineers on Wall Street failed when their abstractions turned ugly in practice, a special breed of physicists has a much deeper history of revolutionizing finance. From fin-de-siècle Paris to Rat Pack-era Las Vegas, from wartime government labs to Yippie communes on the Pacific coast, Weatherall shows how physicists successfully brought their science to bear on some of the thorniest problems in economics, from options pricing to bubbles. The 2008 crisis was partly a failure of mathematical modeling, but even more, it was a failure of some very sophisticated financial institutions to think like physicists. Models--whether in science or finance--have limitations; they break down under certain conditions. And in 2008, sophisticated models fell into the hands of people who didn't understand their purpose, and didn't care. It was a catastrophic misuse of science. The solution, however, is not to give up on models; it's to make them better. Weatherall reveals the people and ideas on the cusp of a new era in finance. This book is riveting history that will change how we think about our economic future.--From publisher description.Of quants and other demons --Primordial seeds --Swimming upstream --From coastlines to cotton prices --Beating the dealer --Physics hits the street --The prediction company --Tyranny of the Dragon King --A new Manhattan project --Send physics, math, and money!SecuritiesUnited States.Wall Street (New York, N.Y.)edl2013-01-16aC0NTG",null,null,"OCLC","o772099444"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"135","IMPORT-1377205600.55222","01277cam a2200349Ia 4500ocn757473443OCoLC20130116201544.0111012s2011 sp b e 000 0 spa d97884152561518415256159(OCoLC)757473443BTCTAengBTCTAYDXCPBDXQBXTXGNTGUtOrBLWspafreNTGAQ172.5.E77B3818 201150023500 BAU SPANISHBaudet, Jean(Jean C.)Curieuses histoires de la science.SpanishNuevas historias curiosas de la ciencia :[relatos fascinantes sobre descubrimientos fundamentales y errores imperdonables] /Jean C. Baudet ; traducción de Eva Jiménez Julià.Teià, Barcelona :Ma Non Troppo,c2011.235 p. :ports. 
;23 cm.Subtitle from cover.Translation of: Curieuses histoires de la science : quand les chercheurs se trompent.Errors, ScientificHistoryPopular works.ScienceHistoryPopular works.Spanish language editionNonfiction.localcsr2013-01-16aC0NTG",null,null,"OCLC","o757473443"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"136","IMPORT-1377205600.55222","01627cam a2200409Ia 4500ocn761850659OCoLC20130614123750.0111117s2011 sp a e b 001 0 spa d97884807692978480769297(OCoLC)761850659BTCTAengBTCTAYDXCPBDXZP7QBXTXGGPISAPNTGUtOrBLWspafreNTGAQE862.D5M5218 2011567.922567.9 MIC SPANISHMichard, Jean-Guy.Monde perdu des dinosaures.SpanishEl mundo perdido de los dinosaurios /Jean-Guy Michard ; [traducción, documentación y adaptación, Manuel Pijoan Rotgé].1a ed. en lengua española.Barcelona :Blume,2011.143 p. :ill. (some col.) ;18 cm.Biblioteca ilustrada ;11Descubrir la ciencia y la técnicaTranslation of: Le monde perdu des dinosaures.Elogio de la diversidad --En busca de una identidad --Historia natural de un dinosaurio ordinario --La ciencia puesta en duda --Ser dinosaurio en un mundo en plena mutación.Includes bibliographical references (p. 136) and index.DinosaursPopular works.Pijoan Rotgé, Manuel.Biblioteca ilustrada (Blume Ediciones (Barcelona, Spain)) ;11.Biblioteca ilustrada.Descubrir la ciencia y la técnica.csr2013-01-16aC0NTG",null,null,"OCLC","o761850659"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"137","IMPORT-1377205600.55222","00884cam a2200277Ia 4500ocn768169362OCoLC20130116174706.0111208s2011 mx e 000 0 spa d9786073102698 (pbk.)6073102690 (pbk.)(OCoLC)768169362BTCTAengBTCTAYDXCPBDXJTBUtOrBLWNTGA613.25 COR SPANISHCordoniz, Andréa.Exorciza a tu alma gorda :una guía para cambiar tus hábitos y adelgazar para siempre /Andréa Cordoniz ; traducción Pilar Obón.México, D.F. :Grijalbo,2011.135 p. ;15 cm.Weight loss.Spanish language editionNonfiction.localObón, Pilar.lmc2013-01-03aC0NTG",null,null,"OCLC","o768169362"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"138","IMPORT-1377205600.55222","01496cam a2200361 a 4500ocn796758568OCoLC20130116220539.0120913s2013 nju e b 001 0 eng 20120300489781118185117 (hbk.)1118185110 (hbk.)9781118235966 (ebk.)1118235967 (ebk.)(OCoLC)796758568DLCengDLCIG#BTCTABDXOCLCOYDXCPWIQNTGUtOrBLWNTGARM222.2.W4513 2013613.2/523613.25 WHYWhyte, John,1953-AARP new American diet :lose weight, live longer /John Whyte.Hoboken, N.J. :John Wiley & Sons,c2013.xiv, 226 p. 
;25 cm.Includes bibliographical references and index.The promise of weight loss and a longer life --The healing power of food --AARP new American diet nutrition basics --You don't have to be overweight --Dealing with emotional eating and cravings --AARP new American diet guidelines --The meal plans: 7, 14, and 30 days --AARP new American diet recipes --Reducing your risk of disease: cancer, diabetes, and heart disease --Eat well, get fit, sharpen your brain.Weight loss.Reducing dietsPsychological aspects.Nutrition.JL2013-01-16aC0NTG",null,null,"OCLC","o796758568"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"139","IMPORT-1377205600.55222","01288cam a2200397Ia 4500ocn752071979OCoLC20130614123750.0110909s2011 sp a e 001 0 spa d9788425519727 (pbk.)8425519721(OCoLC)752071979BTCTAengBTCTAYDXCPZQPSAPBDXORXUtOrBLWspagerNTGA613.723613.7 GRA SPANISHGrabbe, Dieter.Gute Figur Quickies.SpanishUna buena figura rápido :ponte en forma y mejora tu salud : 10 minutos al día de ejercicios para gente con poco tiempo /Dieter Grabbe ; [traducción, Margarita Gutiérrez].Buena figura rápidoBarcelona :Hispano Europea,c2011.96 p. :col. ill. ;23 cm.Translation of: Gute Figur Quickies.Includes index.Physical fitness.Exercise.Reducing exercises.Health.Weight loss.Spanish language editionNonfiction.localGutiérrez, Margarita.lmc2013-01-16aC0NTG",null,null,"OCLC","o752071979"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"140","IMPORT-1377205600.55222","01785cam a2200373Ka 4500ocn756584063OCoLC20130614123750.0111005s2012 cau e b 000 0 spa d97814019394581401939457(OCoLC)756584063BTCTAengBTCTAYDXCPBDXOQXIGANTGUtOrBLWspaengNTGARA776.95.M2618 2012613 MAN SPANISHMancini, Fabrizio,1965-Power of self-healing.SpanishEl poder de la autosanación :libera tu potencial natural de sanación en 21 días /Fabrizio Mancini ; [foreword by Wayne W. Dyer ; traducción, Adriana Miniño].Carlsbad, Calif. :Hay House,c2012.xx, 306 p. ;23 cm.Translation of: The power of self-healing : unlock your natural healing potential in 21 days.Includes bibliographical references.Para la mayoría de nosotros no es extraño tener problemas de salud, enfermedades o dolor. Pero, ¿y si hubiera una solución para restaurar nuestro bienestar que no involucrara fármacos, cirugía u otros procedimientos médicos? Pues bien, la hay, y la encontrarás en el interior de tu propio cuerpo. ¡Asi es! Tu cuerpo tiene integrada la capacidad de sanación: un sistema sorprendente de autoreparación que trabaja día y noche, y está bajo tu control fomentarla. --Publisher.Self-care, Health.Healing.Mind and body therapies.Spanish language editionNonfiction.localMiniño, Adriana.csr2013-01-16aC0NTG",null,null,"OCLC","o756584063"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"141","IMPORT-1377205600.55222","01696cam a2200385Ia 4500ocn798057520OCoLC20130614123750.0120627s2012 flu e 000 0aspa d97816143565851614356580(OCoLC)798057520BTCTAengBTCTABDXGZDAZTOCLCOGPIUtOrBLWspaengn-us---NTGA616.99437 PAU SPANISHPausch, Jai,1966-Dream new dreams.SpanishVolver a soñar :reimaginar la vida después de perder a un ser querido /Jai Pausch.1a ed.Doral, FL :Aguilar,2012.xv, 236 p. 
;21 cm.Título original: Dream new dreams.En Volver a soñar, Jai Pausch narra por primera vez su propia historia: el tránsito emocional de esposa y madre a cuidadora a jornada completa que viajaba incansablemente entre sus tres hijos pequeños y los hospitales en los que Randy era sometido a tratamiento, lejos de su hogar; y posteriormente a viuda y madre sin esposo en lucha constante por mantener el sentido de estabilidad en su familia, y hacerle frente a su propio dolor y a los retos cotidianos del hogar ...Pausch, Jai,1966-Women caregiversUnited StatesBiography.PancreasCancerPatientsFamily relationshipsUnited States.Pausch, RandyHealth.DeathPsychological aspects.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG",null,null,"OCLC","o798057520"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"142","IMPORT-1377205600.55222","01474cam a2200349 a 4500ocn775406332OCoLC20130116022401.0120822s2013 nyua e b 001 0 eng 20120286929780757003493 (pbk.)0757003494 (pbk.)(OCoLC)7754063321350030QBIDLCengDLCQBXBTCTABDXOCLCOORXVETJO8NTGUtOrBLWNTGARC901.7.H45S64 2013617.4/6105923617.461059 SNYSnyder, Rich.What you must know about dialysis :the secrets to surviving and thriving on dialysis /Rich Snyder.DialysisGarden City Park, NY :Square One Publishers,c2013.vii, 197 p. :ill. ;23 cm.Includes bibliographical references (p. 177-188) and index.For most people, dialysis is an unplanned and uncharted journey. In this book, Dr. Snyder provides you with a powerful beacon to light the path ahead with compassion and with rock-solid answers to your most important questions. This book not only answers common questions about dialysis, but, also, offers practical advice, commonsense strategies and complementary options.HemodialysisPatients.HemodialysisPopular works.JL2013-01-16aC0NTG",null,null,"OCLC","o775406332"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"143","IMPORT-1377205600.55222","00965cam a2200313Ia 4500ocn801698548OCoLC20130116153058.0120725s2012 sp a e b 000 0 spa d97884255202118425520215(OCoLC)801698548JQWJQWJQWUtOrBLWNTGA636.7088723636.70887 SEW SPANISHSewell, Ken.Adiestramiento que funciona :obediencia básica /Ken Sewell.Obediencia básicaBarcelona :Hispano Europea,c2012.96 p. :col. ill. ;23 cm. +1 videodisc (120 min. : sd., col. ; 4 3/4 in.)In Spanish.Includes bibliographical references (p. 94-95).DogsTraining.Spanish language editionNonfiction.locallmc2013-01-16aC0NTG",null,null,"OCLC","o801698548"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"144","IMPORT-1377205600.55222","01453cam a2200373Ia 4500ocn796276333OCoLC20130614123750.0120614s2012 enka e d 000 0 eng dGBB259136bnb016106552Uk7882882049780199640249 (hbk.)0199640246 (hbk.)(OCoLC)796276333(OCoLC)788288204UKMGBengUKMGBOCLCOYDXCPBWKYNKBWXEYMORXOCLCOBTCTABDXNTGUtOrBLWNTGATX349.A86 2012641.0323641.03 AYTAyto, John.The diner's dictionary :word origins of food & drink /John Ayto.2nd ed.Oxford :Oxford University Press,c2012.x, 405 p. :ill. ;21 cm.\"First edition published in 1990 by Routledge as The glutton's glossary; Revised edition publsihed 1993 by arrangement with Routledge as The diner's dictionary; First issued as an Oxford University Press paperback 1994 as The Gourmet's guide; Second edition published 2012 by arrangement with Routledge as The diner's dictionary\"--T.p. 
verso.FoodDictionaries.BeveragesDictionaries.CookingDictionaries.Ayto, John.Glutton's glossary.AMW2013-01-16aC0NTG",null,null,"OCLC","o796276333"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"145","IMPORT-1377205600.55222","02196cam a2200481Ia 4500ocn824512563OCoLC20130614123750.0130116s2012 ch a e b 001 0 chi d9789866179341 (v. 1)9866179346 (v. 1)(OCoLC)824512563NTGNTGNTGUtOrBLWchiengNTGA641.523641.5 MCG CHINESE V.1McGee, Harold.Keys to good cooking.Chinese880-01Chu yi zhi yao.Shang =Keys to good cooking: a guide to making the best of foods and recipes /[Haluode.maji (Harold McGee) zhu ; Deng Ziijin yi].Chu yi zhi yao. 1Keys to good cooking: a guide to making the best of foods and recipes880-02Chu ban.880-03Xin bei shi :Da jia chu ban :Yuan zu wen hua fa xing,2012.xvii, 267, xxxv :ill. ;23 cm.In traditional Chinese and some English.Translation of: Keys to good cooking: a guide to making the best of foods and recipes.880-04\"Wan quan zhang wo chu fang,wan mei liao li shi cai\"--Cover.A one-stop reference answers nearly every kitchen conundrum the home cook may have in a single volume, from equipment and cooking methods to how to handle nearly every ingredient. By the author of On Food and Cooking: The Science and Lore of Kitchen.Includes bibliographical referencesand index.Cooking.Food.Chinese language edition (Traditional)Nonfiction.local880-05Deng, Zijin.245-01廚藝之鑰.上 =Keys to good cooking: a guide to making the best of foods and recipes /[哈洛德.馬基(Harold McGee)著 ; 鄧子衿譯]250-02初版260-03新北市 :大家出版 :遠足文化發行,2012.500-04\"完全掌握廚房,完美料理食材\"--Cover.700-05鄧子衿,translatorst2013-01-16aC0NTG",null,null,"OCLC","o824512563"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"146","IMPORT-1377205600.55222","03263cam a22003978a 4500ocn741542746OCoLC20130116175518.0111213s2013 nyua e b 001 0 eng 201104443797814516556811451655681(OCoLC)741542746DLCengDLCBTCTAYDXCPBDXOCLCOZS3IK2NTGUtOrBLWpccNTGATX837.P67 2012641.5/63623641.5636 PORPorter, Jessica.The MILF diet :let the power of whole foods transform your body, mind, and spirit --deliciously /Jessica Porter.1st Emily Bestler/Atria Books hardcover ed.New York :Atria Books,2013.vii, 360 p. :col. ill. ;24 cm.Includes bibliographical references (p. 347) and indexes.\"From the acclaimed author of The Hip Chick's Guide to Macrobiotics, the MILF diet is what happens when the Skinny Bitch grows up and gets married. MILF has become a popular slang term for sexually attractive older women. But Jessica Porter wants women to own the term and reunite sexuality with the special gifts of womanhood. One of the quickest routes to becoming a MILF is by eating whole, natural foods and letting go of the processed stuff. As a result, the female body finds its peaceful home again. A MILF feels energized, yet relaxed and clear headed.The MILF Diet is a practical cookbook for women who want to achieve optimal health and happiness. It includes not only seventy-five photographs and recipes but tips for keeping a MILF-y kitchen and food plan, and MILF weight loss. Despite being centered around whole grains, organic seasonal vegetables, sea vegetables, natural sweeteners, and mostly plant-based proteins--while suggesting giving up dairy, white sugar, processed foods, and meat--the diet can be followed by non-vegetarians, vegetarians, and vegans alike. 
By eating the whole, natural and delicious foods of The MILF Diet, a MILF-to-be will not only turn back the clock, gain a spicier sex life and find her inner goddess, she will also strengthen her immune system, experience a newfound energy and reduce her risk of disease. Your DILF won't know what hit him\"--Provided by publisher.\"The MILF Diet is a practical cookbook for women who want to achieve optimal health and happiness. It will include not only 75-100 recipes but tips for keeping a MILF-y kitchen and food plan, MILF weight loss, and MILF beauty tips! Despite being centered around whole grains, organic seasonal vegetables, sea vegetables, natural sweeteners, mostly plant-based proteins, and suggesting giving up dairy, white sugar, processed foods and factory-farmed meat, the diet can be followed by non-vegetarians, vegetarians, and vegans alike\"--Provided by publisher.Vegetarian cooking.Natural foods.WomenHealth and hygiene.Cookbooks.lcgftCover imagehttp://www.netread.com/jcusers2/1247/681/9781451655681/image/lgcover.9781451655681.jpgedl2013-01-16aC0NTG",null,null,"OCLC","o741542746"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"147","IMPORT-1377205600.55222","01735nam a2200457Ii 4500ocn798307498OCoLC20130614123750.0120706s2012 ch ap e 000 0dchi d97898661992959866199290(OCoLC)798307498CYTCYTchiCYTNTGUtOrBLWNTGA641.824 WAN CHINESE880-01Wang, Anqi.880-02100 \"%\"cheng gong!chao jian dan de song bing fen fa shi xian dian :yong song bing fen qing song zuo chu mei ge ren du xi ai de 70 zhong fa shi xing fu dian xin /Wang Anqi zuo.880-03100 cheng gong!chao jian dan de song bing fen fa shi xian dian.880-04Taipei shi :Bang lian wen hua,2012.125 p. :col. ill. ;26 cm.880-05Zi wei guan ;61880-06Wang an qi yong an ni.In traditional Chinese.Pies.Pastry.Quiches (Cooking)Cooking, French.Cookbooks.lcgftChinese language edition (Traditional)Nonfiction.local880-07Zi wei guan ;61.100-01王安琪245-02100%成功!超簡單的鬆餅粉法式鹹點 :用鬆餅粉輕鬆做出每個人都喜愛的70種法式幸福點心 /王安琪作246-03100成功!超簡單的鬆餅粉法式鹹點260-04臺北市 :邦聯文化,2012.490-05滋味館 ;61.500-06王安琪用安妮830-07滋味館 ;61.st2013-01-16aC0NTG",null,null,"OCLC","o798307498"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"148","IMPORT-1377205600.55222","01130cam a2200337Ia 4500ocn809123806OCoLC20130116201305.0120822s2012 ohua e 001 0 eng d016156395Uk9781440321016 (hbk.)1440321019 (hbk.)(OCoLC)8091238061349552QBIUKMGBengUKMGBOCLCOBDXA6QQBXOQXVP@NTGUtOrBLWNTGATT180.M55 2012684.0823684.08 MILMiller, Jeff,1956-The foundations of better woodworking :[how to use your body, tools and materials to do your best work] /by Jeff Miller.WoodworkingCincinnati, Ohio :Popular Woodworking ;Georgetown, Ont. :Distributed in Canada by Fraser Direct,c2012.192 p. :col. ill. ;29 cm.Subtitle from cover.Includes bibliographical references (p. 188) and index.Woodwork.JL2013-01-16aC0NTG",null,null,"OCLC","o809123806"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"149","IMPORT-1377205600.55222","01783cam a2200457Ia 4500ocn761854003OCoLC20130614123750.0111118s2011 sp a e b 001 0 spa d97884807693038480769300(OCoLC)761854003BTCTAengBTCTAYDXCPBDXZP7TXGGPIOCLCOSAPNTGUtOrBLWspafree-sp---NTGA720.9223720.92 THI SPANISHThiébaut, Philippe.Gaudí, bâtisseur visionnaire.SpanishGaudí :arquitecto visionario /Phillipe Thiébaut ; [traducción, Marina Huguet Cuevas].1. ed. en lengua españ̃ola.Barcelona :Blume,2011.127 p. :ill. (some col.) 
;18 cm.Biblioteca ilustrada ;7Descubrir el arteTranslation of: Gaudí, bâtisseur visionnaire.Un arquitecto, una ciudad --De Oriente al nacionalismo catalán --Gótico y racionalismo --Hacia una arquitectura orgánica --El templo helénico del gótico mediterráneo.Includes bibliographical references (p. 122-123) and index.Gaudí, Antoni,1852-1926.ArchitectureSpainBarcelona.Architecture, Modern19th century.Architecture, Modern20th century.Spanish language editionNonfiction.localHuguet Cuevas, Marina.Biblioteca ilustrada.Descubrir el arte.Biblioteca ilustrada (Blume Ediciones (Barcelona, Spain)) ;7.csr2013-01-16aC0NTG",null,null,"OCLC","o761854003"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"150","IMPORT-1377205600.55222","01150cam a2200325Ia 4500ocn809534372OCoLC20130116234221.0120910s2012 enka e 000 0 eng d 201194287497814380012581438001258(OCoLC)809534372IMDIMDYDXCPNTGUtOrBLWNTGA741.2 CURCurnow, Vera.Drawing and sketching :expert answers to the questions every artist asks /Vera Curnow.1st U.S. ed.London :Quantum Books ;Hauppage, NY :Barrons Educational Series,2012.224 p. :col. ill. ;20 cm.Art answersIncludes index.Media and application techniques --Other equipment --Basic practices --Tone, color, and composition --Working on location --Landscapes and townscapes --Figures and animals --Still life and flowers --Approaches and style.DrawingTechnique.Art answers.jmhaC0NTG",null,null,"OCLC","o809534372"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"151","IMPORT-1377205600.55222","01090cam a2200349Ia 4500ocn788293318OCoLC20130116204308.0120421s2012 moua e 6 000 1 eng d 20129367509781449423063 (pbk.)144942306X (pbk.)(OCoLC)788293318BTCTAengBTCTABDXWIMCPLUtOrBLWn-us---NTGAPN6728.F64A46675 2012741.5697323741.56973 AMEAmend, Bill.FoxTrot.SelectionsJasotron: 2012 :a FoxTrot collection /by Bill Amend.Kansas City, Mo. :Andrews McMeel Pub.,c2012.144 p. :chiefly col. ill. ;21 x 26 cm.Collection of previously published comic strips.FamiliesComic books, strips, etc.American wit and humor, Pictorial.Comic books, strips, etc.United States.djn 2013-01-16aC0NTG",null,null,"OCLC","o788293318"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"152","IMPORT-1377205600.55222","01278cam a2200361Ia 4500ocn779265290OCoLC20130116204853.0120304s2012 moua e 6 000 1 eng d 20129367409781449424046144942404X(OCoLC)779265290BTCTAengBTCTABDXOEMTXAWIMUtOrBLWNTGA741.56/97323741.56973 TOOToomey, Jim P.Sherman's Lagoon.SelectionsThink like a shark :avoiding a porpoise-driven life : the seventeenth Sherman's Lagoon collection /by Jim Toomey.Avoiding a porpoise-driven lifeKansas City, Mo. :Andrews McMeel Pub.,c2012.127 p. :chiefly ill. (some col.) 
;23 cm.More adventures of Sherman the shark and his friends on Kapupu Island.SharksComic books, strips, etc.Seashore animalsComic books, strips, etc.Marine animalsComic books, strips, etc.American wit and humor, Pictorial.Comic books, strips, etc.United States.djn 2013-01-16aC0NTG",null,null,"OCLC","o779265290"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"153","IMPORT-1377205600.55222","01234cam a2200409 i 4500ocn788282347OCoLC20130116172401.0120705t20122012waua e b 000 0 eng 2012024301GBB288928bnb016163382Uk8095640059781604680867 (pbk.)1604680865 (pbk.)(OCoLC)788282347(OCoLC)809564005DLCengrdaDLCBTCTABDXOCLCOCDXUKMGBNTGUtOrBLWpccNTGATT835.F547 2012746.4623746.46 FIN Finley, Jill,1956-Home sweet quilt :fresh, easy quilt patterns from Jillily Studio /Jill Finley.Bothell, WA :Martingale,[2012]96 pages :illustrations (chiefly color) ;28 cmtextrdacontentunmediatedrdamediavolumerdacarrierIncludes bibliographical references.PatchworkPatterns.QuiltingPatterns.House furnishings.AMW2013-01-16aC0NTG",null,null,"OCLC","o788282347"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"154","IMPORT-1377205600.55222","02089cam a22003618a 4500ocn785077610OCoLC20130116201040.0121015s2012 nyua e 000 0 eng 201203778297803991616430399161643(OCoLC)785077610DLCengDLCBTCTABDXOCLCOJTHYDXCPVHPABGMOFNTGUtOrBLWNTGAPN1997.B444B75 2012791.43/7223791.4372 BRIBridges, Jeff,1949-The Dude and the Zen master /Jeff Bridges & Bernie Glassman.New York :Blue Rider Press,c2012.272 p. :ill. ;22 cm.Just throw the fu**ing ball, man! Sometimes you eat the bear, and sometimes, well, he eats you ;It's down there somewhere, let me take another look ;Dude, you're being very unDude --The Dude abides and the Dude is not in. Yeah, well, ya know, that's just like, uh, your opinion, man ;Phone's ringin', Dude ;New sh** has come to light --That rug really tied the room together, did it not? You know, Dude, I myself dabbled in pacifism at one point. Not in 'Nam, of course ;You mean coitus? ;What makes a man, Mr. Lebowski? ;What do you do, Mr. Lebowski? ;Nothing's fu**ed, Dude --Enjoyin' my coffee. Sorry, I wasn't listening ;Strikes and gutters, ups and downs ;Some burgers, some beers, a few laughs. Our fu**ing troubles are over, Dude ;Say, friend, ya got any more of that good sarsaparilla?A dialogue between the actor and his long-time spiritual guru explores the challenges of Bridges' Hollywood career and the ways in which Zen teachings have informed his efforts to do good in the modern world.Big Lebowski (Motion picture)Philosophy in motion pictures.Zen BuddhismDoctrines.Conduct of life.Glassman, Bernard(Bernard Tetsugen)edl2013-01-16aC0NTG",null,null,"OCLC","o785077610"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"155","IMPORT-1377205600.55222","00934cam a2200301Ia 4500ocn781681058OCoLC20130116200612.0120324s2012 nyua e 000 0 eng d016127134Uk9781455520589 (pbk.)1455520586(OCoLC)781681058BTCTAengBTCTABDXUKMGBOCOYDXCPSRCOQXZQPBWXOCORMNTGUtOrBLWNTGAPN1992.77.P676P67 2012791.4572 PORPortlandia :a guide for visitors /Fred Armisen, [et al.].New York, N.Y. :Grand Central Publishing,2012.192 p. :col. ill. 
;21 cm.Portlandia (Television program)Portland (Or.)Humor.Armisen, Fred,1966-Brownstein, Carrie.JL2013-01-16aC0NTG",null,null,"OCLC","o781681058"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"156","IMPORT-1377205600.55222","01352cam a2200361Ia 4500ocn802295184OCoLC20130116205937.0120723s2012 nyua e 000 0 eng d 2012935560GBB273090bnb016131693Uk9781618930057 (hbk.)1618930052 (hbk.)(OCoLC)802295184UKMGBengUKMGBOCLCOBKXVP@NTGUtOrBLWNTGA796.8323796.83 SCHSchatz, Howard,1940-At the fights :inside the world of professional boxing /Howard Schatz ; executive producer/editor, Beverly J. Ornstein ; forward by Jim Lampley.New York :Sports Illustrated,c2012.240 p. :ill. (chiefly col.) ;37 cm.An award-winning photographer famed for his series of improvisational portraits of actors for Vanity Fair presents an oversized treasury of images that complement striking depictions of today's athletic stars with insightful sports commentary.\"Sports illustrated.\"Boxing.BoxingPictorial works.Ornstein, Beverly J.Sports illustrated.AMW2013-01-16aC0NTG",null,null,"OCLC","o802295184"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"157","IMPORT-1377205600.55222","01454cam a2200409Ia 4500ocn817945746OCoLC20130128214807.0121114s2012 nyu e 000 1 spa d80192897497804514188450451418840(OCoLC)817945746(OCoLC)801928974HT#HT#YDXCPBTCTABDXNTGUtOrBLWspaengNTGAPS3604.A9875863 DAY SPANISHDay, Sylvia.Bared to you.SpanishDesnuda ante ti /Sylvia Day.1a ed.New York :Berkley Books,2012.viii, 354 p. ;21 cm.Una novela de la serie crossfireIncludes a readers guide.In Spanish.Translation of: Bared to you.Era guapo y genial, irregular y ardiente. Me sentía atraída hacia él como a nada ni a nadie en toda mi vida. Ansiaba su contacto como una droga, aun sabiendo que me debilitaría. Yo era imperfecta, estaba dañada y él abría esas grietas mías con tanta facilidad.Man-woman relationshipsFiction.Erotic fiction.gsafdSpanish language editionFiction.localDay, Sylvia.Crossfire novel.Spanish.cme2013-01-28aC0NTG",null,null,"OCLC","o817945746"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"158","IMPORT-1377205600.55222","02315cam a2200433Ia 4500ocn794922014OCoLC20130614123751.0120316s2012 sp e 000 1 spa d77841797597884666473118466647317(OCoLC)794922014(OCoLC)778417975Spanish Pubs Llc, 8871 Sw 129 Ter, Miami, FL, USA, 33176, (305)2333365SAN 002-0400M$KM$KBTCTAYDXCPBDXTXGGPINTGUtOrBLWspaengNTGAPS813.5423863 KOS SPANISHKostigen, Thomas.Blood Line.SpanishLa línea de sangre /Thomas M. Kostigen ; traducción de Máximo González Lavarello.1a ed.Barcelona ;Miami :Ediciones B,2012.335 p. ;23 cm.La tramaText in Spanish.Translation of: Blood line.Michael Shea es un reconocido periodista de la BBC que trabaja como enviado especial en los lugares más conflictivos del planeta. En Oriente Próximo se entera de que tendrá lugar una reunión secreta entre el cabecilla de la milicia armada islamista y el secretario del ministro de Justicia israelí. Intenta infiltrarse y, tras ser descubierto, se ve forzado a huir. El gran secreto consiste en un tratado entre las dos partes por el cual se cede gas natural a los israelíes a cambio de dinero para armar a las milicias palestinas. En el negocio también está involucrada la mayor compañía de gas inglesa y su inversionista saudí más importante. Si la gente lo supiera, los resultados para los jefes serían desastrosos... Mientras tanto, los israelíes ponen una bomba en la sede de la BBC en Palestina. ¿Por qué de pronto Shea se convierte en el criminal más buscado del momento? 
¿Qué tiene que ver su propio tío en todo el entramado? -- Cover, p. [4]JournalistsFiction.TerrorismFiction.Spanish language editionFiction.localGonzález Lavarello, Máximo.Trama (Ediciones B (Firm))cme2013-01-29aC0NTG",null,null,"OCLC","o794922014"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"159","IMPORT-1377205600.55222","01198cam a2200397Ma 4500ocn786041029OCoLC20130614123751.0120409s2011 sp e 000 1 spa d7536350309788466645201 (pbk.)8466645209 (pbk.)(OCoLC)786041029(OCoLC)753635030CPGCPGBTCTAYDXCPBDXPL#I5BOCLCOTMENTGUtOrBLWNTGAPQ[Fic]863 MAD SPANISHMadrid, Juan,1947-Brigada central.3,El hombre del reloj /Juan Madrid.Hombre del relojBrigada central IIIEd. rev. por el autor, 1a. ed.Barcelona :Ediciones B,2011.453 p. ;23 cm.Brigada centralLa tramaPoliceSpainFiction.RomaniesSpainFiction.Spanish language editionFiction.localMadrid, Juan,1947-Serie Brigada central.Trama (Ediciones B (Firm))cme2013-01-28aC0NTG",null,null,"OCLC","o786041029"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"160","IMPORT-1377205600.55222","01711cam a2200409 a 4500ocn783166282OCoLC20130614123751.0120711s2012 nyu e 000 1 spa 201202619397803079513730307951375(OCoLC)783166282DLCengDLCBTCTABDXOCLCOYDXCPMLYCDXNTGUtOrBLWpcce-sp---NTGAPQ6663.A7218M36 2012863/.6423863 MAR SPANISHMarías, Javier.Mañana en la batalla piensa en mí /Javier Marías ; prólogo de Elide Pittarello.1a ed. Vintage Español.Nueva York :Vintage Español,2012.354 p. ;21 cm.Text in Spanish.\"A married woman, whose husband is in London, invites another man, whom she hardly knows, for dinner. In her house a two-year-old boy is sleeping. Finally, when the man approaches her after dinner, the woman is feeling ill. She dies before ever becoming his lover. What to do with the corpse, the boy, the absent husband? What is the difference between life and death? This is one of the most passionate and emotional novels of recent times and surely the most lauded\"--Amazon.com.Married womenSpainMadridFiction.Television writersSpainMadridFiction.DeathFiction.SecrecyFiction.Madrid (Spain)Fiction.Spanish language editionFiction.localcme2013-01-28aC0NTG",null,null,"OCLC","o783166282"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"161","IMPORT-1377205600.55222","01916cam a2200433Ia 4500ocn806253786OCoLC20130614123751.0120815s2012 mx e 000 1 spa 8050512779786071119056 (pbk.)6071119057 (pbk.)(OCoLC)806253786(OCoLC)805051277LPULPUBTCTAYDXCPCPGCPLBDXNTGUtOrBLWspaitaNTGAPQ4873.A9532N4718 2012[Fic]863 MAZ SPANISHMazzantini, Margaret.Nessuno si salva da solo.SpanishNadie se salva solo /Margaret Mazzantini ; traducción de Carlos Gumpert.1a ed.Mexico, D.F. :Alfaguara,2012.218 p. ;24 cm.Text in Spanish; translated from the Italian.Translation of: Nessuno si salva da solo.\"Delia y Gaetano eran pareja. Ya no lo son, y han de aprender a asumirlo. Desean vivir tranquilos pero, al mismo tiempo, les inquieta y seduce lo desconocido. ¿En qué se equivocaron? No lo saben. La pasión del comienzo y la rabia del fi nal están todavía demasiado cercanas. En una época en la que parece que ya está todo dicho, sus palabras y silencios dejan al desnudo sus soledades, sus urgencias, sus recuerdos, y provocan brillos imprevistos al poner en escena, una noche de verano, el viaje del amor al desamor\"--Cover p. 
[4].Husband and wifeFiction.Marital conflictFiction.Separation (Law)Fiction.Anorexia nervosaFiction.Italian fictionTranslations into Spanish.Spanish language editionFiction.localGumpert, Carlos.cme2013-01-29aC0NTG",null,null,"OCLC","o806253786"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"162","IMPORT-1377205600.55222","00885cam a2200313 a 4500ocn738353144OCoLC20130116161237.0120201s2011 sp a e 000 1 spa 201241726797884991834428499183441(OCoLC)738353144DLCengDLCBTCTABDXPL#I5BNTGUtOrBLWpccNTGAPQ6719.A644E86 2011863 SAN SPANISHSantos, Jesús M.Esperanza /Jesús M. Santos.1. ed.Barcelona :Roca Editorial,2011.351 p. :ill. ;24 cm.Labrador Pérez, Esperanza,1922-2011Fiction.Spanish language editionFiction.localcsr2013-01-15aC0NTG",null,null,"OCLC","o738353144"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"163","IMPORT-1377205600.55222","02340cam a2200433 i 4500ocn798437876OCoLC20130129182513.0120621r20122011nyu e 000 1 spa 201202568579426366797801424269370142426938(OCoLC)798437876(OCoLC)794263667DLCengrdaDLCNVCOCLCOMLYJPLYDXCPBTCTANTGUtOrBLWpccNTGAPQ8180.43.R58M85 2012863/.723863 TRI SPANISHTriviño Argüello, Pilar.Las mujeres del Tío Sam :una novela /Pilar Triviño Argüello.New York, New York :C.A. Press, Penguin Group (USA),2012.vii, 280 pages ;23 cmtextrdacontentunmediatedrdamediavolumerdacarrierText in Spanish.\"Las mujeres del Tío Sam, una novela basada en hechos reales, narra las relaciones amorosas de mujeres colombianas de diferentes clases sociales, edades y procedencias que se involucraron con los norteamericanos que hacían parte de la ayuda de Estados Unidos al Plan Colombia. Valeria es una joven profesional que una noche cualquiera el azar cambia su vida y la de sus amigas. Se relacionan en un exclusivo y lujoso bar en Bogotá con militares del los EE. UU., que son hombres bellos, fortachones y, sobretodo, herméticos que no dejan pistas de sus actividades militares en Colombia. Sus atenciones las seducen y entran en un mundo desconocido que las llevan a vivir experiencias amorosas, eróticas, de prostitución, de esperanza y de decepción. Ellas están ilusionadas a alcanzar, a través del amor y la pasión, el \"American Dream\"--Cover p. [4].Man-woman relationshipsFiction.SoldiersUnited StatesFiction.SoldiersColombiaFiction.ColombiaFiction.Spanish language editionFiction.localReproduction of (manifestation):Triviño Argüello, Pilar.Las mujeres del Tío SamBogatá [Colombia] : Editorial Oveja Negra, 2011(DLC) 2011443710cme2013-01-29aC0NTG",null,null,"OCLC","o798437876"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"164","IMPORT-1377205600.55222","01767cam a22004574a 4500ocn779266072OCoLC20130116231652.0120813s2012 nyuaf e 000 0aeng 2012030594817577994978145169146714516914679781451691535 (ebook)145169153X (ebook)9781451691504 (pbk.)1451691505 (pbk.)(OCoLC)779266072(OCoLC)817577994DLCengDLCBTCTABDXYDXCPOCLCOIK2LIVFOLLTBWXTXBXLIG#UtOrBLWpccn-us---a-af---NTGADS371.4123.K67G58 2012958.104/742B23958.104742 GIUGiunta, Salvatore A.(Salvatore Augustine),1985-Living with honor /Salvatore A. Giunta ; with Joe Layden.1st Threshold Editions hardcover ed.New York :Threshold Editions,2012.294 p., [8] p. of plates :ill. 
;24 cm.A Medal of Honor recipient shares the story of his military career, recounting his deployment to Afghanistan's volatile Korengal Valley and his life-risking efforts to provide medical aid to wounded fellow soldiers during an October 2007 Taliban ambush.Giunta, Salvatore A.(Salvatore Augustine),1985-Afghan War, 2001-CampaignsAfghanistanKorangal Valley.Afghan War, 2001-Personal narratives, American.Medal of HonorBiography.SoldiersUnited StatesBiography.Layden, Joe.djn 2013-01-16aC0NTG",null,null,"OCLC","o779266072"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"165","IMPORT-1377205600.55222","02176cam a2200385M 4500ocn809615904OCoLC20130116184544.0120910s2012 xx e 000 0 eng d14555228219781455522828(OCoLC)809615904YDXCPYDXCPUUCOCLCOUtOrBLWNTGA629.45009223B HERNANDEZ SPANISHHernández, José M.,1962-Alcanzar las estrellas :la historia inspiradora de un trabajador migrante que se convirtió en astronauta /José M. Hernández [and Monica Rojas Rubin ; prólogo por Emilio Estefan].Cosechador de estrellasTitle in English:Reaching for the stars :the inspiring story of a migrant farmworker turned astronaut1a ed.[New York] :Grand Central Pub.,2012.xvi, 282 p., [16] p. of plates :col. ill. ;24 cm.Title on cover: El cosechador de estrellas.Born into a family of migrant workers, toiling in the fields by the age of six, Jose M. Hernández dreamed of traveling through the night skies on a rocket ship. [The book] is the inspiring story of how he realized that dream, becoming the first Mexican-American astronaut. Hernández didn't speak English till he was 12, and his peers often joined gangs, or skipped school. And yet, by his twenties he was part of an elite team helping develop technology for the early detection of breast cancer. He was turned down by NASA eleven times on his long journey to donning that famous orange space suit. Hernández message of hard work, education, perseverance, of \"reaching for the stars,\" makes this a classic American autobiography.Hernández, José M.,1962-AstronautsUnited StatesBiography.Migrant agricultural laborersCaliforniaBiography.Spanish language editionNonfiction.localRubin, Monica Rojas.Estefan, Emilio.lmc2013-01-16aC0NTG",null,null,"OCLC","o809615904"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"166","IMPORT-1377205600.55222","01236cam a2200373 a 4500ocn779265891OCoLC20130116215440.0120409s2013 nyu e 000 0aeng 20120113249781451675368 (hbk.)1451675364 (hbk.)9781451675382 (ebk.)1451675380 (ebk.)(OCoLC)779265891DLCengDLCIG#BTCTABDXOCLCOGK5YDXCPJP3VP@NTGUtOrBLWn-us---NTGAPN2287.L28555A3 2013791.4302/8092B23B LAWLESSLawless, Wendy.Chanel bonfire :a memoir /Wendy Lawless.1st Gallery Books hardcover ed.New York :Gallery Books,2013.295 p. ;22 cm.\"Wendy Lawless's ... memoir of resilience in the face of an unstable alcoholic and suicidal mother\"--Provided by publisher.Lawless, Wendy.Motion picture actors and actressesUnited StatesBiography.ActressesUnited StatesBiography.JL2013-01-16aC0NTG",null,null,"OCLC","o779265891"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"167","IMPORT-1377205600.55222","02375cam a2200421 a 4500ocn788281963OCoLC20130116223211.0120924s2013 kyua e bq s001 0beng 20120346429780813136905 (hardcover : alk. paper)0813136903 (hardcover : alk. 
paper)9780813136912 (pdf)0813136911 (pdf)9780813140384 (epub)0813140382 (epub)40021728439(OCoLC)788281963DLCengDLCYDXBTCTABDXOCLCOYDXCPOCLCOYUSNTGUtOrBLWpccNTGAPN2287.M83A55 2013791.43/028/092B23B MURRAYAnkerich, Michael G.,1962-Mae Murray :the girl with the bee-stung lips /Michael G. Ankerich ; foreword by Kevin Brownlow.Lexington, Ky. :University Press of Kentucky,c2013.xi, 376 p. :ill. ;24 cm.Screen classicsIncludes bibliographical references, filmography and index.Untangling Mae Murray's tangled beginnings, 1885-1899 --Dancing into the new century, 1900-1907 --Ziegfeld and the millionaire, 1908-1911 --Life is a cabaret, 1912-1914 --From footlights to kliegs, 1915 --The disillusions of a dream girl, 1916 --Ready for my close-ups, Mr. Lasky!, 1917 --The delicious little Mae, 1918-1919 --On with the dance, 1920 --Strutting like a peacock through Tiffany's, 1921-1922 --Mae the enchantress, 1923-1924 --the merry widow and the dirty Hun, May 1924 March 1925 --From merry widow to gay divorcée, 1925 --Princess Mdivani, 1926 --The lion's roar, the baby's cry, 1927 --A world of cheap imitations, 1928 --The sound of bee-stung lips, 1929-1931 --Oh, brother!, 1932 --From a prince to a toad, 1933 --Losing Koran, 1934-1940 --Outliving fame, 1941-1957 --Self-enchantment, 1958 --1960 --A star in twilight, 1961-1965 --Epilogue --Acknowledgments --Professional theater --Filmography.Murray, Mae,1885-1965.Motion picture actors and actressesUnited StatesBiography.Screen classics (Lexington, Ky.)JL2013-01-16aC0NTG",null,null,"OCLC","o788281963"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"168","IMPORT-1377205600.55222","01446cam a2200409Ia 4500ocn792890096OCoLC20130614123751.0120429s2012 sp a e 000 0aspa c97884663257078466325700(OCoLC)792890096BTCTAengBTCTAYDXCPBDXGPISJPUtOrBLWspaengNTGA813.5423B OATES SPANISHOates, Joyce Carol,1938-Widow's story.SpanishMemorias de una viuda /Joyce Carol Oates ; traducción de María Luisa Rodríguez Tapia.1a ed.Madrid :Punto De Lectura,2012.[471] p. :ill. ;19 cm.Original title: Widow's story.Joyce Carol Oates shares her struggle to comprehend a life absent of the partnership that had sustained and defined her for nearly half a century.Oates, Joyce Carol,1938-Family.Smith, Raymond J.Death and burial.WidowsUnited StatesBiography.Loss (Psychology)Bereavement.Authors, American20th centuryFamily relationships.Spanish language editionNonfiction.localRodríguez Tapia, María Luisa.lmc2013-01-16aC0NTG",null,null,"OCLC","o792890096"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"169","IMPORT-1377205600.55222","01094nam a2200301Ka 4500ocn824509599OCoLC20130116182040.0130116s2009 wauac e 000 0aeng d9780974256832 (pbk.)0974256838 (pbk.)(OCoLC)824509599NTGNTGNTGUtOrBLWNTGA979.777B23B THOMSENThomsen, Ralph Einar.Memories of Richmond Beach :Ralph Einar Thomsen's recollections of small-town life on Puget Sound in the 1920s - 1940s /[by Ralph E. Thomsen].Ralph Einar Thomsen's recollections of small-town life on Puget Sound in the 1920s - 1940sSeattle, Wash. :Ralph E. Thomsen ;Shoreline, Wash. :Shoreline Historical Museum,c2009.viii, 72 p. 
:ill., maps ;23 cm.Thomsen, Ralph Einar.Richmond Beach (Wash.)Biography.Richmond Beach (Wash.)History.slh2013-01-16aC0NTG",null,null,"OCLC","o824509599"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"170","IMPORT-1377205600.55222","01928cam a2200481Ia 4500ocn679931885OCoLC20130614123751.0101107s2011 nyua a 000 1 spa d9781442422926 (boardbook)1442422920 (boardbook)(OCoLC)679931885BTCTAengBTCTAIEPJTHIF9BDXNCLNTGUtOrBLWspaengNTGAPZ74.3.M33483 2011[E]23E MARTIN SPANISHMartin, Bill,1916-2004.Chicka chicka ABC.SpanishChica chica bum bum ABC /por Bill Martin Jr. y John Archambault ; ilustrado por Lois Ehlert ; [traducción de Alexis Romay].Chica chica bum bum A B C1a ed. en lengua española.Nueva York :Simon & Schuster Libros Para Niños,2011.1 v. (unpaged) :col. ill. ;20 x 14 cm.Text in Spanish.Texto en español.Translation of: Chicka chicka ABC.On board pages.\"Publicado originalmente en inglés en 1993 con el título Chicka chicka ABC por Little Simon\"--P. 4 of cover.\"A le dijo a B, y B le dijo a C: \"En el cocotero, ¡allí de esperare!\" Pero cuando todas las letras del alfabeto suben al cocotero... ¡Oh, no¡ Chica chica ¡BUM! ¡Bum!An alphabet rhyme/chant that relates what happens when the whole alphabet tries to climb a coconut tree.English languageAlphabetJuvenile fiction.Alphabet rhymes.Stories in rhyme.Spanish language editionJuvenileFiction.localArchambault, John.Ehlert, Lois.Romay, Alexis.cme2013-01-29aC0NTG",null,null,"OCLC","o679931885"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"171","IMPORT-1377205600.55222","01199cam a2200361 a 4500ocm26553002 OCoLC20130614123751.0920804s1994 nyua a 000 1 eng 92029020 3450661222939385503992244599780399224454(OCoLC)26553002(OCoLC)34506612(OCoLC)229393855DLCengDLCVPWAZUOCLBAKERBTCTAYDXCPUPZMR0CS1CQUIG#TAMSABDXOCLCQOCLCONTGUtOrBLWlcacNTGAPZ7.R1936Go 1994PS3568.A7197G66 1994[E]20E RATHMANNRathmann, Peggy.Good night, Gorilla /Peggy Rathmann.New York :Putnam,c1994.1 v. (unpaged) :col. ill. ;19 x 22 cm.An unobservant zookeeper is followed home by all the animals he thinks he has left behind in the zoo.Zoo animalsJuvenile fiction.ZoosJuvenile fiction.Picture books for children.gsafdsc2013-01-16aC0NTG",null,null,"OCLC","o26553002"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"172","IMPORT-1377205600.55222","01450cam a2200421Ia 4500ocm44114877 OCoLC20130614123751.0000524s1981 nyua a 000 0 eng d 85042990 81043037 767580702006445002397800644500279780808529941 (lib. bdg.)0808529943 (lib. bdg.)0690041225 (Cover)0690041233(OCoLC)44114877(OCoLC)767580702CLOCLOOCLOCLCGOTPIG#BDXNTGUtOrBLWNTGAQB801.7.B73 1981523.819E523.8 BRABranley, Franklyn Mansfield,1915-2002.The sky is full of stars /by Franklyn M. Branley ; illustrated by Felicia Bond.New York, NY :HarperCollins,c1981.34 p. :ill. (some col.) ;21 x 22 cm.Let's-read-and-find-out science. Stage 2Explains how to view stars and ways to locate star pictures, known as constellations, throughout the year.\"HarperTrophy.\"--Pg. [4] of cover.StarsJuvenile literature.ConstellationsJuvenile literature.Bond, Felicia,ill.Let's-read-and-find-out science.Stage 2.bp2013-01-16aC0NTG",null,null,"OCLC","o44114877"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"173","IMPORT-1377205600.55222","01217cam a2200385Ia 4500ocn740627341OCoLC20130614123751.0110706s2012 nyu e 000 1 eng d016009425Uk97803125450620312545061(OCoLC)740627341BTCTAengBTCTAUKMGBBDXOCOJRSEINCPNTGUtOrBLWNTGAPR9199.4.B364B33 2012FIC BARANTBarant, D. D.Back from the undead /D.D. Barant.St. Martin's pbk. 
ed.New York, N.Y. :St. Martins Paperbacks,2012.330 p. ;18 cm.The bloodhound files ;bk. 05Jace faces a danger like no other as she seeks justice for a group of condemned children.VampiresFiction.Criminal profilersFiction.Child traffickingFiction.Vancouver (B.C.)Fiction.Mystery fiction.gsafdFantasy fiction.Barant, D. D.Bloodhound files ;bk. 05.AMW 20120409aC0NTG",null,null,"OCLC","o740627341"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"174","IMPORT-1377205600.55222","01481cam a2200421 a 4500ocn768728884OCoLC20130116211117.0111216s2013 nyu e 000 1 eng 20110480009781451666342 (pbk.)1451666349 (pbk.)9781451666410 (ebk.)1451666411 (ebk.)(OCoLC)768728884DLCengDLCIG#OCLCOBTCTABDXGL4YDXCPCGPOCLCOABGNTGUtOrBLWNTGAPS3606.R36535T48 2013813/.623FIC FRANCISFrancis, Wendy.Three good things /Wendy Francis.3 good things1st Simon & Schuster trade pbk. ed.New York :Simon & Schuster Paperbacks,2013.235 p. ;21 cm.\"A novel\"--Cover.The story of two sisters, one of whom opens a bake shop that features the traditional Danish pastry called kringle, the other of whom is a successful attorney with a baby at home, both of whom long for the guidance of their deceased mother.Includes a reading group guide.SistersFiction.Divorced womenFiction.New mothersFiction.Mothers and daughtersFiction.BakeriesFiction.edl2013-01-16aC0NTG",null,null,"OCLC","o768728884"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"175","IMPORT-1377205600.55222","01980cam a2200421 a 4500ocn778422664OCoLC20130116194608.0120712s2012 nyub e 000 f eng 2012028307016142751Uk9780312607074 (hardcover)0312607075 (hardcover)9781466802506 (e-book)1466802502 (e-book)(OCoLC)778422664DLCengDLCYDXBTCTABDXYDXCPUKMGBOCLCOIUKJP3LEBBWXNTGUtOrBLWpccn-us-vaNTGAPS3557.I4945V53 2012813/.5423FIC GINGRICHGingrich, Newt.Victory at Yorktown /Newt Gingrich, William R. Forstchen, and Albert S. Hanser, contributing editor.1st ed.New York :Thomas Dunne Books,c2012.xiv, 345 p. :maps ;25 cm.General Washington, seeking to end a three-year stalemate, embarks on a secret three-hundred-mile forced march of his entire army to meet the French navy's Chesapeake Bay blockade and capture Cornwallis's entire force. Sgt. Peter Wellsley must pave the way for the army, neutralizing any loyalists who might provide warning. On the other side, Allen Van Dorn receives reports from civilians that something is afoot and is tasked to find out what. As Wellsley moves to block any leaks, Van Dorn tries to penetrate the screen. When one of the former friends is captured, both must decide where their true loyalties lie during the heat of the Battle of Yorktown.Yorktown (Va.)HistorySiege, 1781Fiction.United StatesHistoryRevolution, 1775-1783Fiction.Historical fiction.Forstchen, William R.autHanser, Albert S.edtslh 20121203aC0NTG",null,null,"OCLC","o778422664"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"176","IMPORT-1377205600.55222","01922cam a2200397 a 4500ocn783162692OCoLC20130116215011.0120925s2013 nyu e 000 1 eng 2012036180016229234Uk9780393338621 (pbk.)0393338622 (pbk.)(OCoLC)783162692DLCengDLCIG#BTCTABDXOCLCOUKMGBIH9YDXCPCOOBWXOCPNTGUtOrBLWa-ii---NTGAPR9499.4.J676I45 2013823/.9223FIC JOSEPHJoseph, Manu.The illicit happiness of other people /Manu Joseph.1st American ed.New York :W. W. Norton & Co.,2013.344 p. ;21 cm.Seventeen-year-old Unni has done something terrible. 
The only clue to his actions lies in a comic strip he has drawn, which has fallen into the hands of his father Ousep - a nocturnal anarchist with a wife who is fantasizing about his early death. Ousep begins investigating the extraordinary life of his son, but as he circles closer and closer to the truth, he unravels a secret that shakes his family to the core. Set in Madras in the 1990s, where every adolescent male is preparing for the toughest exam in the world, this is a powerful and darkly comic story involving an alcoholic's probe into the minds of the sober, an adolescent cartoonist's dangerous interpretation of absolute truth, an inner circle of talented schizophrenics and the pure love of a 12-year-old boy for a beautiful girl.FamiliesIndiaFiction.Chennai (India)Fiction.Black humor.gsafdLove stories.gsafdBlack humor.Love stories.JL2013-01-16aC0NTG",null,null,"OCLC","o783162692"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"177","IMPORT-1377205600.55222","01431cam a2200433 i 4500ocn783161163OCoLC20130614123752.0120523s2012 ilu e 000 f eng 20120208209781564787859 (cloth : alk. paper)1564787850 (cloth : alk. paper)9781564788139 (pbk. : alk. paper)156478813X (pbk. : alk. paper)(OCoLC)783161163DLCengDLCYDXBTCTABDXYDXCPOCLCOIADORXCDXGZMVP@NTGUtOrBLWengnorpccNTGAPT8951.29.A39S4513 2012839.82/37423FIC SAETERBAKKENSæterbakken, Stig,1966-2012.Selvbeherskelse.EnglishSelf-control /Stig Sæterbakken ; translated by Seán Kinsella.Champaign :Dalkey Archive Press,2012.154 p. ;21 cm.Norwegian literature series\"Originally published in Norwegian as Selvbeherskelse by J.W. Cappelen, Oslo, 1998.\"ParalyticsFiction.FamiliesFiction.Married peopleFiction.HusbandsFiction.Kinsella, Seán.Norwegian literature series.JL2013-01-16aC0NTG",null,null,"OCLC","o783161163"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"178","IMPORT-1377205600.55222","01769cam a2200337Ia 4500ocn823170908OCoLC20130116194503.0130102s2013 cau e 000 1 eng d7957596119781597804455 (pbk.)1597804452 (pbk.)(OCoLC)823170908(OCoLC)795759611IMRIMRBTCTABDXOCLCONTGUtOrBLWNTGAFIC TEPPOTeppo, Mark.Earth thirst /Mark Teppo.First ed.San Francisco, CA :Night Shade Books,2013.277 p. ;25 cm.\"Vampires are our last hope\" -- front cover.\"Humanity -- over-breeding, over-consuming -- is destroying the very planet they call home. Multi-national corporations despoil the environment, market genetically-modified crops to control the food supply, and use their wealth, influence and private armies to crush anything, and anyone, that gets in the way of their profits. Nothing human can stop them. But something unhuman might. Once they did not fear the sun. Once they could breathe the air and sleep where they chose. But now they can rest only within the uncontaminated soil of Mother Earth - and the time has come for them to fight back against the ruthless corporations that threaten their immortal existence. They are the last guardians of paradise, more than human but less than angels. They call themselves the Arcadians. 
We know them as vampires...\" -- back cover.VampiresFiction.OverpopulationFiction.PollutionFiction.Night Shade Books.JL2013-01-16aC0NTG",null,null,"OCLC","o823170908"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"179","IMPORT-1377205600.55222","01644cam a2200445 a 4500ocm57142725 OCoLC20130116212244.0041117t20052001nyu e 000 1 eng 200406364956780652623825617219073039781585674824158567482697815856770921585677094(OCoLC)57142725(OCoLC)56780652(OCoLC)62382561(OCoLC)721907303DLCengDLCIG#WIQSRBBAKERBTCTAYDXCPGO3YW6ABGYVROCLCQBDXUtOrBLWe-uk-enNTGAPR6072.I525S66 2005823/.91422FIC VINCENZIVincenzi, Penny.Something dangerous /Penny Vincenzi.Woodstock, NY :Overlook Press,2005, c2001.710 p. ;23 cm.Born into the powerful Lytton family, twins Adele and Venetia Lytton grow up in a golden world, until the dark specter of Nazi Germany begins to loom over Europe and the Lytton family faces challenges for which they are unprepared.Lytton family (Fictitious characters)Fiction.Publishers and publishingFiction.SistersFiction.TwinsFiction.EnglandFiction.Great BritainHistoryGeorge V, 1910-1936Fiction.Great BritainHistoryGeorge VI, 1936-1952Fiction.Domestic fiction.Love stories.gsafdJL2012-05-18aC0NTG",null,null,"OCLC","o57142725"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"180","IMPORT-1377205600.55222","01895cam a2200409 a 4500ocn779266197OCoLC20130116182912.0120914s2013 nyub e b 000 1 eng 20120349879781451674705 (pbk.)1451674708 (pbk.)(OCoLC)779266197DLCengDLCIG#BTCTABDXYDXCPOCLCOIK2VP@BWXNTGUtOrBLWn-us-nyn-us-njNTGAPS3623.A336S48 2013813/.623FIC WADEWade, Christine.Seven locks :a novel /Christine Wade.7 locks1st Atria Paperback ed.New York :Atria Paperback,2013.vi, 329 p. :map ;21 cm.\"The Hudson River Valley, 1769. A man mysteriously disappears without a trace, abandoning his wife and children on their farm at the foot of the Catskill Mountains. At first many believe that his wife, who has the reputation of being a scold, has driven her husband away, but as the strange circumstances of his disappearance circulate, a darker story unfolds. And as the lines between myth and reality fade in the wilderness, and an American nation struggles to emerge, the lost man's wife embarks on a desperate journey to find the means to ensure her family's survival\"--P. [4] of cover.Includes a reading group guide.Includes bibliographical references (p. 327).FamiliesNew York (State)Fiction.Farm lifeNew York (State)Fiction.Hudson River Valley (N.Y. and N.J.)History18th centuryFiction.Catskill Mountains Region (N.Y.)Fiction.Historical fiction.gsafdedl2013-01-16aC0NTG",null,null,"OCLC","o779266197"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"181","IMPORT-1377205600.55222","01669cam a2200409Ia 4500ocn761854367OCoLC20130614123752.0111120s2011 sp a c 000 1 spa d9788479429072 (tapa dura)8479429070 (tapa dura)(OCoLC)761854367BTCTAengBTCTABDXDWPJTHNTGUtOrBLWspadutNTGAJ JAQUET SPANISHJaquet, Gertie.Het snoepprinsesje.SpanishQué princesa tan golosa! /Gertie Jaquet ; [traducción del neerlandés, Nadine Beliën].Madrid :Macmillan Iberia,c2011.37 p. :ill. ;22 cm.LibrosaurioText in Spanish.Translation of: Het snoepprinsesje.\"Malena es una princesa que se pasa el día comiendo regaliz, caramelos y galletas de chocolate. Y al final, se encuentra tan inflada como un globo y toda desdichada. Los Reyes están alarmados: ¿ćomo podrán hacer que su hija sea feliz? La inesperada llegada de un músico, hará que su vida cambie a ritmo de rap\"--Cover p. 
[4].Obesity in childrenJuvenile fiction.ConfectioneryJuvenile fiction.PrincessesJuvenile fiction.Rap (Music)Juvenile fiction.Children's stories, DutchTranslations into Spanish.Spanish language editionJuvenileFiction.localBëlien, Nadine.Librosaurio.cme2013-01-24aC0NTG",null,null,"OCLC","o761854367"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"182","IMPORT-1377205600.55222","01833cam a2200445Ia 4500ocn747534402OCoLC20130614123752.0110821s2012 sp a c 000 1 spa d97884152352178415235216(OCoLC)747534402BTCTAengBTCTABDXOQXIMDTXWHSNTGUtOrBLWspaitaNTGA[Fic]23J PIRATESCHL SPANISH V.7Stevenson, Steve.Diario di Capitan Barracuda.SpanishEl diario del Capitán Barracuda /Steve Stevenson ; ilustraciones de Stefano Turconi ; [traducción, Julia Osuna Aguilar].At head of title: Laescuela de piratas1a. ed.Barcelona :Pirueta,2011.82 p. :col. ill. ;21 cm.Escuela de Piratas ;7Text in Spanish.Ha comenzado el segundo curso en la Escuela de Piratas y los Lobitos de Mar, para variar, arman una de las suyas: dejan que se les escape delante de sus narices la tripulación más cafre, la de los novatos. ¿Conseguirán encontrarla? ¿Y qué ocurrirá cuando, en su búsqueda, se tropiecen con un pirata..., o, mejor dicho, con su esqueleto?PiratesJuvenile fiction.Treasure trovesJuvenile fiction.IslandsJuvenile fiction.Buried treasureFiction.Adventure and adventurersFiction.Spanish language editionJuvenileFiction.localTurconi, Stefano,1974-ill.Osuna Aguilar, Julia.Stevenson, Steve.Scuola dei Pirati.Spanish ;7.bp2013-01-15aC0NTG",null,null,"OCLC","o747534402"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"183","IMPORT-1377205600.55222","01873cam a2200361Ia 4500ocn793220853OCoLC20130116151516.0120501s2012 flua c 000 0bspa d9781614353416 (pbk.)1614353417 (pbk.)(OCoLC)793220853BTCTAengBTCTABDXIFANYPGO3CLEUtOrBLWNTGAPQ8097.N4Z65 2012JB NERUDA SPANISHLázaro León, Georgina.Conoce a Pablo Neruda /Georgina Lázaro León ; ilustraciones de Valeria Cis.Pablo NerudaDoral, Fla. :Alfaguara,c2012.27 p. :col. ill. ;26 cm.Personajes del mundo hispánico\"Pablo Neruda fue senador, cónsul, embajador, académico de la lengua y uno de los poetas más reconocidos de la literatura universal. Y a pesar de ser un señor tan importante, conservaba su alma de niño. Coleccionaba objetos y libros como si fueran juguetes, se pintaba bigotes con corcho quemado, le encantaban los pájaros y, en una ocasión, domesticó una mangosta. ¿Quieres conocer a Pablo Neruda? ¡Abre este libro y empieza a leer!\"--P. [4] of cover.Pablo Neruda was many things in his life, but is best known for being a great poet. Yet he never lost his ability to be childlike and had a wonderful sense of humor. Want to get to know Pablo Neruda? 
Open this book and start reading!Neruda, Pablo,1904-1973Juvenile literature.Poets, Chilean20th centuryBiographyJuvenile literature.Spanish language editionJuvenileNonfiction.localCis, Valeria.Personajes del mundo hispánico.lmc2013-01-16aC0NTG",null,null,"OCLC","o793220853"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"184","IMPORT-1377205600.55222","01563cam a2200433Ii 4500ocn820835985OCoLC20130614123752.0121211s2013 nyu e 000 1 eng d78828874397803079857670307985768(OCoLC)820835985(OCoLC)788288743UPZengrdaUPZBTCTABDXYDXCPIZ2MR0VP@NTGUtOrBLWn-us-meNTGAPS3619.H5426S78 2013813/.623M SHIELDSShields, Kieran,author.A study in revenge :[a novel] /Kieran Shields.First edition.New York :Crown Publishers,[2013]372 pages ;24 cmtextrdacontentunmediatedrdamediavolumerdacarrierA sequel to The Truth of All Things finds late-19th century police detective Archie Lean and his half-Native American partner, Perceval Grey, investigating the theft of a recently buried body and the staging of a bizarre occult scene that is linked to a centuries-old magical relic.PoliceMainePortlandFiction.Occultism and criminal investigationFiction.Grave robbingFiction.RevengeFiction.Portland (Me.)Fiction.Historical fiction.gsafdMystery fiction.gsafdJL2013-01-16aC0NTG",null,null,"OCLC","o820835985"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"185","IMPORT-1377205600.55222","01944cam a2200361Ia 4500ocn788241518OCoLC20130116192010.0120418t20132012nyu e 000 1 eng d9780451238931 (pbk.)0451238931 (pbk.)(OCoLC)788241518BTCTAengBTCTABDXJQWZS3CO2IEPNTGUtOrBLWNTGAPS3568.O843485S43 2012813.5423ROM ROSSRoss, JoAnn.Sea glass winter /JoAnn Ross.New York :Signet,[2013], c2012.388 p. ;18 cm.A Shelter Bay novel\"As an Explosive Ordnance Disposal Specialist, Dillon Slater had one of the most dangerous jobs in the military. Now, he's enjoying the pace of life in Shelter Bay, where he teaches high school physics. He still gets to blow things up, but as the school basketball coach he also gets to impart leadership skills. His latest minefield: fifteen-year-old Matt Templeton; and Matt's irresistible mother. Claire Templeton moved her troubled teenage son to the small town of Shelter Bay to escape the bad influences at his school in L.A. But when his attitude earns her a visit from the handsome basketball coach, she wonders if this role model might be too much of a temptation--for her. Because though she isn't looking for a relationship, she can't seem to resist Dillon's playful charm. But what she doesn't realize is that Dillon isn't playing games; he's playing for keeps.\"-- P. [4] of cover.Fatherless familiesFiction.Problem youthFiction.Teacher-counselor relationshipsFiction.Man-woman relationshipsFiction.Love stories.gsafdRoss, JoAnn.Shelter Bay novel.JL2013-01-16aC0NTG",null,null,"OCLC","o788241518"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"186","IMPORT-1377205600.55222","01747cam a2200373Ia 4500ocn779266065OCoLC20130116191416.0120308s2013 nyu e 000 1 eng d97814516715991451671598(OCoLC)779266065BTCTAengBTCTABDXFOLLTJO3YW6IZ8NTGUtOrBLWNTGA813/.623ROM SHOWALTERShowalter, Gena.Last kiss goodnight /Gena Showalter.1st Pocket Books pbk. ed.New York :Pocket Books,2013.436 p. ;18 cm.Otherworld assassin\"Pocket Books paranormal romance\"--Spine.Black ops agent Solomon Judah awakens caged and bound in a twisted zoo where otherworlders are the main attraction.Vika Lukas, the owner's daughter, is tasked with Solo's care and feeding. 
The monster inside him yearns to kill her on sight, even though she holds the key to his escape. But the human side of him realizes the beautiful deaf girl is more than she seems--she's his. Vika endures the captives' taunts and loathing, hoping to keep them alive even if she can't free them. Only, Solo is different-- he protects her. But as hostility turns to forbidden romance, his feelings for her will be used against him ... and he'll be put to a killer test.AssassinsFiction.Man-woman relationshipsFiction.Deaf womenFiction.Paranormal romance stories.Occult fiction.gsafdShowalter, Gena.Otherworld assassin.JL2013-01-16aC0NTG",null,null,"OCLC","o779266065"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"187","IMPORT-1377205600.55222","01829cam a2200517 a 4500ocn781939269OCoLC20130614123752.0120327s2013 nyu d 000 1 eng 20120061459780385742061 (hbk.)0385742061 (hbk.)9780375990397 (lib. bdg.)0375990399 (lib. bdg.)9780375979972 (ebk.)0375979972 (ebk.)(OCoLC)781939269DLCengDLCIG#BTCTABDXYDXCPSINLBJAGOCLCOIK2NTGUtOrBLWn-us-nyNTGAPZ7.C7834Jan 2013[Fic]23Y COONEYCooney, Caroline B.Janie face to face /Caroline B. Cooney.1st ed.New York :Delacorte Press,c2013.345 p. ;22 cm.At college in New York City, Janie Johnson, aka Jennie Spring, seems to have successfully left behind her past as \"The face on the milk carton,\" but soon she, her families, and friends are pursued by a true-crime writer who wants their help in telling her kidnapper's tale.KidnappingJuvenile fiction.AuthorshipJuvenile fiction.College stories.Man-woman relationshipJuvenile fiction.Identity (Psychology)Juvenile fiction.FamiliesJuvenile fiction.New York (N.Y.)Juvenile fiction.Universities and collegesFiction.LoveFiction.IdentityFiction.Family lifeFiction.College stories.Love stories.gsafdsc2013-01-16aC0NTG",null,null,"OCLC","o781939269"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"188","IMPORT-1377205600.55222","01667cam a22004938a 4500ocn793503410OCoLC20130614123752.0120501s2013 nyu d 000 1 eng 201201299397814424439901442443995(OCoLC)793503410DLCengDLCBTCTABDXVHPOCLCOILCUPZNTGUtOrBLWlcacpccNTGAPZ7.S3818Fal 2013[Fic]23Y SCHROEDERSchroeder, Lisa.Falling for you /Lisa Schroeder.1st Simon Pulse hardcover ed.New York :Simon Pulse,2013.355 p. ;22 cm.Very good friends, her poetry notebooks, and a mysterious \"ninja of nice\" give seventeen-year-old Rae the strength to face her mother's neglect, her stepfather's increasing abuse, and a new boyfriend's obsessiveness.Family problemsFiction.Dating (Social customs)Juvenile fiction.High schoolsJuvenile fiction.SchoolsFiction.PoetryJuvenile fiction.StepfathersJuvenile fiction.FloristsJuvenile fiction.Family problemsFiction.Dating (Social customs)Fiction.High schoolsFiction.SchoolsFiction.PoetryFiction.StepfathersFiction.FloristsFiction.Young adult fiction.JL2013-01-16aC0NTG",null,null,"OCLC","o793503410"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"189","IMPORT-1377205600.55222","01107cam a2200349 a 4500ocn794037285OCoLC20130614123752.0120918s2013 nyu d 000 1 eng 20120316289781595145994 (hbk.)1595145990 (hbk.)(OCoLC)794037285DLCengDLCIG#BTCTABDXCO2OCLCOIK2NTGUtOrBLWNTGAPZ7.Y89592Pap 2013[Fic]23Y YOVANOFFYovanoff, Brenna.Paper valentine /Brenna Yovanoff.New York :Razorbill,c2013.304 p. 
;22 cm.Followed everywhere by the ghost of her recently deceased best friend, Hannah investigates the serial murders of young girls in her community.GriefJuvenile fiction.Ghost stories.Serial murderersJuvenile fiction.GhostsFiction.Ghost stories.gsafdsc2013-01-16aC0NTG",null,null,"OCLC","o794037285"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"190","IMPORT-1377205600.55222","01297cam a2200385 a 4500ocn809789695OCoLC20130614123752.0120910s2012 ilua d 000 0ceng 20120340449781608461561 (hbk.)1608461564 (hbk.)(OCoLC)809789695DLCengDLCIG#YDXCPNTGUtOrBLWn-us---NTGACT217.C44 2012303.48/40922B23Y303.484092 ONE101 changemakers :rebels and radicals who changed US history /edited by Michele Bollinger and Dao X. Tran.One hundred one changemakersHundred one changemakersChicago, IL :Haymarket Books,2012.ix, 215 p. :ill. ;29 cm.Social reformersUnited StatesBiographyJuvenile literature.Social movementsUnited StatesJuvenile literature.United StatesBiographyJuvenile literature.ReformersUnited StatesBiography.United StatesJuvenile biography.Bollinger, Michele.Tran, Dao X.AMW2013-01-16aC0NTG",null,null,"OCLC","o809789695"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"191","IMPORT-1377205600.55222","01239cam a2200337Ka 4500ocn760977475OCoLC20130116193926.0111112s2012 nyua e 000 0 eng d9781609787110 (pbk.)1609787110 (pbk.)(OCoLC)760977475BTCTAengBTCTAYDXCPBDXDPLNTGUtOrBLWNTGAY570.76 STA 2013/2014Stabler, Linda Brooke.AP biology 2013-2014 /Linda Brooke Stabler, Mark Metz, Paul Gier.Advanced placement biology 2013-2014Kaplan AP biology 2013-2014New York :Kaplan,c2012.xi, 317 p. :ill. ;; 28 cm.\"[Includes] two full-length practice tests, detailed answer explanations, score-raising strategies and tips, diagnostic test, end-of-chapter quizzes\"--Cover.BiologyExaminationsStudy guides.Advanced placement programs (Education)ExaminationsStudy guides.College entrance achievement testsStudy guides.Metz, Mark.Gier, Paul.csr2013-01-16aC0NTG",null,null,"OCLC","o760977475"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"192","IMPORT-1377205600.55222","02804cgm a2200613Ia 4500ocm51844300 OCoLC20130116205432.0vd mvaizu030312t20051997vtu072 e vleng d15780797569781578079759783421366893WG36689WGBH Boston Video(OCoLC)51844300TNHengTNHJBWOCLCQRCSOCLCQCYCBTCTARCSUMCOCLCQYDXCPSTFAKUXY4SBMBLPWAUOCLCAYVROCLCQUtOrBLWengengn-us---NTGAHV4504.R53 2003385/.097322DVD 385.0973 RIDRiding the rails[videorecording] /directed, written, & produced by Michael Uys and Lexy Lovell ; the American History Project ; Out of the Blue Productions, Inc.United States.[S. Burlington, Vt.] :WGBH Boston Video,[2005], c1997.1 videodisc (ca. 72 min.) :sd., col. 
with b&w sequences ;4 3/4 in.DVD; Dolby digital; aspect ratio: 4x3 full screen.In English; closed-captioned.Cinematography, Samuel Henriques ; editor, Howard Sharp ; original music, Jay Sherman-Godfrey.Originally produced as a documentary film in 1997.Companion to the book: Riding the rails, teenagers on the move during the Great Depression / by Errol Lincoln Uys.An edited version of \"Riding the rails\" was produced as an episode of the PBS documentary television program American experience in 2003.Not rated.\"Tells the unforgettable story of the 250,000 teenagers who left their homes and hopped freight trains during the Great Depression\"--Container.Special features: interview with Uys & Lovell (5 min.); slide show of Depression-era photos (3 min., 28 images); excerpt from the companion book; weblinks.Depressions1929United States.TeenagersUnited States.United StatesEconomic conditions1918-1945.United StatesHistory1919-1933.RailroadsUnited StatesHistory.TrampsUnited StatesHistory.Historical films.lcgftDocumentary films.lcgftVideo recordings for the hearing impaired.lcgftUys, Michael.prodrtausLovell, Lexy.ausprodrtAmerican History Project.Out of the Blue Entertainment (Firm)WGBH Video (Firm)American experience (Television program)cme2013-01-16hC0NTG",null,null,"OCLC","o51844300"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"193","IMPORT-1377205600.55222","01493cas a2200445 i 4500ocn823177198OCoLC20130116071520.0130102c20139999mduar s 0 a0eng c 2013234400(OCoLC)823177198IHVrdaIHVGZLHVLINUIULNTGUtOrBLWpccn-us---NTGAHA202.P76317.323R310 PROProQuest statistical abstract of the United States.Statistical abstract of the United StatesNational data bookLanham, Maryland :Bernan,2012-volumes :maps ;29 cmAnnualtextrdacontentunmediatedrdamediavolumerdacarrier2013-Volume for 2013 called also \"1st ed.\" in suggested citation.Content edited by ProQuest.Description based on: 2013; title from title page.Latest issue consulted: 2013.United StatesStatisticsPeriodicals.ProQuest (Firm)Online version:ProQuest statistical abstract of the U.S.[Ann Arbor, Mich.] : ProQuest, [2012]-(OCoLC)823161068Statistical abstract of the United States0081-4741(DLC) 04018089(OCoLC)1193890EP 20130116aC0NTG",null,null,"OCLC","o823177198"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"194","IMPORT-1377205600.55222","01853cam a2200397 a 4500ocn693809384OCoLC20130116202350.0111026s2012 cau e b 001 0 eng 2011044358015947615Uk7571485869781608717439 (pbk. : alk. paper)1608717437 (pbk. : alk. paper)(OCoLC)693809384(OCoLC)757148586DLCengDLCYDXBTCTAYDXCPZMMUKMGBBWXBDXCOOCLUUCXCDXNTGUtOrBLWpccn-us---NTGAKF8742.S914 2012347.73/2623R347.7326 SUP 2012The Supreme Court compendium :data, decisions & developments /Lee Epstein ... [et al.].5th ed.Thousand Oaks, Calif. :CQ Press,c2012.xxviii, 836 p. ;24 cm.Includes bibliographical references (p. 
809-816) and index.The Supreme Court : an institutional perspective --The Supreme Court's review process, caseload, and cases --The Supreme Court's opinion, decision, and outcome trends --The justices : backgrounds, nominations, and confirmations --The justices : post-confirmation activities and departures from the Court --The justices : oral arguments, votes, and opinions --The Supreme Court : its political and legal environments --The Supreme Court and public opinion --The impact of the Supreme Court.United States.Supreme CourtOutlines, syllabi, etc.Constitutional lawUnited StatesOutlines, syllabi, etc.Judicial reviewUnited StatesOutlines, syllabi, etc.Epstein, Lee,1958-sc2013-01-16aC0NTG",null,null,"OCLC","o693809384"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"195","IMPORT-1377205600.55222","01935cam a2200409 a 4500ocn767824989OCoLC20130116182419.0111205s2012 ncua e ber 001 0 eng 2011046545015991274Uk101580315DNLM9780786464586 (softcover : alk. paper)0786464585 (softcover : alk. paper)(OCoLC)767824989DLCengDLCYDXUKMGBYDXCPCDXNLMSBMABGNTGUtOrBLWpccNTGARC523.M665 2012WT 13M821e 2012616.8/3100323R616.831003 MOO 2012Moore, Elaine A.,1948-Encyclopedia of Alzheimer's disease :with directories of research, treatment and care facilities /Elaine A. Moore with Lisa Moore ; illustrated by Marvin G. Miller ; foreword by David Perlmutter.2nd ed.Jefferson, N.C. :McFarland,c2012.viii, 447 p. :ill. ;26 cm.Includes bibliographical references and index.The encyclopedia --Long term day care treatment centers, by state --Research facilities.\"The second edition contains updated resources, research institution information, a listing of treatment and care facilities, advances in Alzheimer's disease research, genetics, diagnostic procedures, treatment, alternative medicine, brain plasticity, risk factors, clinical trial information, nursing home safety, and preventive measures. The book describes medical treatments used in other countries and the results of collaborative efforts\"--Provided by publisher.Alzheimer's diseaseEncyclopedias.Alzheimer's diseaseDirectories.Moore, Lisa,1973-sc2013-01-16aC0NTG",null,null,"OCLC","o767824989"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"196","IMPORT-1377205600.55222","01475cam a2200433Ia 4500ocn808438897OCoLC20130614123752.0120828s2013 nyua e bf 001 0 eng d016038641Uk9780071638562 (pbk.)0071638563 (pbk.)(OCoLC)808438897MPAMPAOCLCOTEFCNMBLUKMGBIULNTGUtOrBLWNTGARG101.C94 2013WQ 100C976 201361823R618 CUR 2013Current diagnosis & treatment :obstetrics & gynecology /[edited by] Alan H. DeCherney, ... [et al.].Current diagnosis and treatment :obstetrics and gynecologyObstetrics & gynecologyObstetrics and gynecology11th ed.New York :McGraw-Hill Medical,c2013.xv, 1,024 p. :ill. (some col.) ;24 cm.Previous ed.: Current diagnosis and treatment : obstetrics & gynecology. New York : Lange/McGraw-Hill, 2007.\"A Lange medical book\"--T.p.Includes bibliographical references and index.Obstetrics.ObstetricsDiagnosis.Gynecology.GynecologyDiagnosis.DeCherney, Alan H.Obstetrics & gynecology.sc2013-01-16aC0NTG",null,null,"OCLC","o808438897"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"197","IMPORT-1377205600.55222","01946cam a2200433 a 4500ocn707248074OCoLC20130614123752.0110705s2012 njua e 001 0 eng 2011028227015803963Uk9780470648858 (pbk.)0470648856 (pbk.)(OCoLC)707248074DLCengDLCYDXBTCTAYDXCPUKMGBBONDEBBGCOONTGUtOrBLWpccNTGANA31.C44 2012720.323R720.3 CHI 2012Ching, Frank,1943-A visual dictionary of architecture /Francis D. K. 
Ching.2nd ed.Hoboken, N.J. :Wiley,c2012.viii, 328 p. :ill. ;31 cm.Includes index.\"Over 66 basic aspects of architecture are comprehensively covered with over 5,000 words in a visual context, to help visual thinkers clarify meanings. Comprehensive index permits the reader to locate any important word in the text. Oversized pages help present complicated material in easy-to-comprehend spreads. - Written by one of the most famous architectural authors --Frank Ching's name alone is a key selling feature for this book--he has earned the respect and trust of designers, design educators, and students around the world\"--Provided by publisher.ArchitectureDictionaries.Picture dictionaries, English.ARCHITECTURE / General.bisacshArchitektur.(DE-588c)4002851-3.swdWörterbuch.(DE-588c)4066724-8.swdArchitektur.(DE-588)4002851-3.gndWörterbuch.(DE-588)4066724-8.gndCover imagehttp://catalogimages.wiley.com/images/db/jimages/9780470648858.jpgsc2013-01-16aC0NTG",null,null,"OCLC","o707248074"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"198","IMPORT-1377205600.55222","01062cam a2200313I 4500ocn812172044OCoLC20130116183508.0121008s2012 ilu e 001 0 eng d97809817736740981773672(OCoLC)812172044IMDIMDYDXCPNTGUtOrBLWNTGA737.497323R737.4973 SWISwiatek, Anthony.Encyclopedia of the commemorative coins of the United States :history, art, investment & collection of America's memorial coinage /edited and compiled by Anthony J. Swiatek.Chicago, Ill. :KWS Publishers,2012.712 p. :ill. ;24 cm.Includes index.Commemorative coinsUnited States.Commemorative coinsCollectors and collectingUnited States.Coins, American.Coins, AmericanCollectors and collecting.sc2013-01-16aC0NTG",null,null,"OCLC","o812172044"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"199","IMPORT-1377205600.55222","01487cam a2200421 a 4500ocn318420872OCoLC20130116201348.0111209s2012 caua e be 001 0 eng 20110501559780313349485 (hbk. : acid-free paper)0313349487 (hbk. : acid-free paper)9780313349492 (ebook)0313349495 (ebook)(OCoLC)318420872DLCengDLCBTCTAYDXCPBDXBWXIADMLYNTGUtOrBLWpccn-mx---NTGAF1210.M6175 2012972.08/4223R972.0842 MEXMexico today :an encyclopedia of life in the republic /Alex M. Saragoza, Ana Paula Ambrosi and Silvia D. Zárate, editors.Santa Barbara, Calif. :ABC-CLIO,c2012.2 v. (xxxvi, 728 p.) :ill. ;27 cm.Includes bibliographical references and index.V. 1.A-H --v. 2.I-Z.MexicoCivilizationEncyclopedias.MexicoSocial life and customsEncyclopedias.MexicoPolitics and government2000-Encyclopedias.MexicoEconomic conditions1994-Encyclopedias.Saragoza, Alex.Ambrosi, Ana Paula.Zárate, Silvia D.(Silvia Dolores)sc2013-01-16aC0NTG",null,null,"OCLC","o318420872"]} +{"__c":"bre","__p":[null,null,"t","now",1,"f","now",1,null,"200","IMPORT-1377205600.55222","01644cam a2200457Ma 4500ocn779454161OCoLC20130116200050.0110808s2012 caua e be 001 0deng 20110316669780313329449 (set : alk. paper)0313329443 (set : alk. paper)9780313329456 (v. 1 : alk. paper)0313329451 (v. 1 : alk. paper)9780313329463 (v. 2 : alk. paper)031332946X (v. 2 : alk. paper)(OCoLC)779454161DLCengN15OCLCONTGUtOrBLWn-us---NTGAE169.12.E515 2012973.923R973.9 ENCEncyclopedia of the sixties :a decade of culture and counterculture /James S. Baugess and Abbe Allen DeBolt, editors.Santa Barbara, Calif. :Greenwood,c2012.2 v. (xliv, 871 p.) :ill. ;26 cm.Includes bibliographical references and index.V. 1.A-M --v. 
2.N-Z.United StatesCivilization20th centuryEncyclopedias.CountercultureUnited StatesHistoryEncyclopedias.United StatesHistory1961-1969Encyclopedias.United StatesBiographyEncyclopedias.Nineteen sixtiesEncyclopedias.CountercultureHistoryEncyclopedias.BiographyEncyclopedias.Baugess, James S.DeBolt, Abbe Allen.sc2013-01-16aC0NTG",null,null,"OCLC","o779454161"]}
diff --git a/KCLS/bs_files/export/backstage_export.pl b/KCLS/bs_files/export/backstage_export.pl
new file mode 100644
index 0000000000..df8d188789
--- /dev/null
+++ b/KCLS/bs_files/export/backstage_export.pl
@@ -0,0 +1,86 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+use Time::HiRes;
+use DBI;
+use OpenSRF::System;
+
+# Script is used as a single-file export process.
+# Currently generates a list of ids and passes them to the test script.
+#
+# Still needs to add the actual exporting of marc records by calling
+# marc_export.
+#
+# Possibly remove the testing at some future date.
+#
+# For Cat Date, will need to add parameter gathering to pass to
+# generate_export_ids.pl.
+
+my $start = Time::HiRes::gettimeofday();
+my $generate = "perl generate_export_ids.pl";
+foreach (@ARGV) {
+    $generate = $generate . " " . $_;
+}
+
+my $result = system($generate);
+
+if($result == 0) {
+    # Generate the file name for the current date
+    (my $sec,my $min,my $hour,my $mday,my $mon,my $year,my $wday,my $yday,my $isdst) = localtime();
+    $mon++;
+    $year += 1900;
+    system("mkdir -p /var/KCLS_AUTH");
+    system("cat /var/KCLS_AUTH/export_ids_" . $mon ."_" . $mday ."_" . $year . "| perl marc_export --encoding UTF-8 > /var/KCLS_AUTH/export_data_" . $mon ."_" . $mday ."_" . $year . ".mrc");
+
+    my $config = '/openils/conf/opensrf_core.xml';
+    OpenSRF::System->bootstrap_client( config_file => $config );
+
+    print "\nSetting export date to " . $year . "-" . $mon . "-" . $mday ."\n";
+
+    # Re-read the ID file generated above so the export date can be stamped on each record.
+    open INPUT, "</var/KCLS_AUTH/export_ids_" . $mon ."_" . $mday ."_" . $year;
+    my @records;
+    while (<INPUT>) {
+        chomp;
+        if($_) {
+            push(@records, $_);
+        }
+    }
+    close INPUT;
+    my $dbh = connect_to_db();
+    foreach(@records) {
+        my $query = "SELECT * FROM metabib.set_export_date(" . $_ . ",'" . $year . "-" . $mon . "-" . $mday . "')";
+        my $sth = $dbh->prepare($query);
+        $sth->execute();
+        $sth->finish();
+    }
+    $dbh->disconnect();
+    print "Set export date on " . scalar(@records) . " records.\n";
+    my $end = Time::HiRes::gettimeofday();
+    printf("\nFinished exporting records in %.2f seconds\n", $end - $start);
+}
+
+# Grab DB information from local settings. Return connected db handle (or die).
+sub connect_to_db {
+    my $sc = OpenSRF::Utils::SettingsClient->new;
+    my $db_driver = $sc->config_value( reporter => setup => database => 'driver' );
+    my $db_host = $sc->config_value( reporter => setup => database => 'host' );
+    my $db_port = $sc->config_value( reporter => setup => database => 'port' );
+    my $db_name = $sc->config_value( reporter => setup => database => 'db' );
+    if (!$db_name) {
+        $db_name = $sc->config_value( reporter => setup => database => 'name' );
+        print STDERR "WARN: <database><name> is a deprecated setting for database name. For future compatibility, you should use <database><db> instead." if $db_name;
+    }
+    my $db_user = $sc->config_value( reporter => setup => database => 'user' );
+    my $db_pw = $sc->config_value( reporter => setup => database => 'pw' );
+
+    die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user);
+
+    my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port;
+
+    return DBI->connect(
+        $dsn,$db_user,$db_pw, {
+            AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1
+        }
+    ); # shouldn't need 'or die...'
with RaiseError=>1 +} diff --git a/KCLS/bs_files/export/export_id_test.pl b/KCLS/bs_files/export/export_id_test.pl new file mode 100755 index 0000000000..4aa1422a91 --- /dev/null +++ b/KCLS/bs_files/export/export_id_test.pl @@ -0,0 +1,198 @@ +#!/usr/bin/perl + +# Generates test cases by gathering records that should not be exported +# and then checking with a list of export candidates to find and collisions. +# Outputs these collisions in a file called invalid_ids. +# +# Will need to add tests for Cat Date in the future + +use strict; +use warnings; + +use DBI; +use OpenSRF::System; + +# Delete in final? +use Data::Dumper; + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); +my @export_candidates; + +# Make some space for output +print "\n\n"; +# Chomp off the newlines +while(<>) { + chomp; + push(@export_candidates, $_); +} +my @results; +my @invalid_ids; +my @total_invalid_ids; + +############################################################ +# Verify that export candidates does not contain ids where # +# where 001 field contains 'oc' # +############################################################ + +print "Querying for 001 field test cases ... \n"; + +my $query = q{ + SELECT bre.id + FROM biblio.record_entry bre + JOIN metabib.real_full_rec mrfr + ON bre.id = mrfr.record + WHERE (mrfr.tag = '001' AND mrfr.value NOT ILIKE '%oc%') + INTERSECT ALL + SELECT bre.id + FROM biblio.record_entry bre + JOIN metabib.real_full_rec mrfr + ON bre.id = mrfr.record + WHERE(SELECT count(*) FROM metabib.real_full_rec mrfr WHERE (mrfr.tag = '035' AND mrfr.subfield ILIKE 'a' AND mrfr.value ILIKE '%WaOLN%')) = 0; +}; + +@results = run_query($query); + +@invalid_ids = find_invalid_ids(\@results, \@export_candidates); +push(@total_invalid_ids, "\n001 entries \n"); +push(@total_invalid_ids, @invalid_ids); + +############################################################ +# Verify that export candidates does not contain ids where # +# where 086 or 092 or 099 field does not contain 'on order'# +############################################################ + +print "\n\nQuerying for 086 or 092 or 099 field test cases ... \n"; + +$query = q{ + SELECT bre.id + FROM biblio.record_entry bre + JOIN metabib.real_full_rec mrfr + ON bre.id = mrfr.record + WHERE (mrfr.tag ILIKE '086' or mrfr.tag ILIKE '092' OR mrfr.tag ILIKE '099') + AND mrfr.value ILIKE '%on order%'; + +}; + +@results = run_query($query); + +@invalid_ids = find_invalid_ids(\@results, \@export_candidates); +push(@total_invalid_ids, "\n092 and 099 entries \n"); +push(@total_invalid_ids, @invalid_ids); + + +############################################################ +# Verify that export candidates either has items attached # +# or field 998 subfield d have value "d" or "t" or "v" # +# or "w" or "x" or "y" or "1" # +############################################################ + +print "\n\nQuerying for has items or 998 field test cases ... 
\n"; + +$query = q{ + SELECT bre.id + FROM biblio.record_entry bre + JOIN metabib.real_full_rec mrfr + ON bre.id = mrfr.record + WHERE (tag = '998' AND subfield = 'd' + AND value NOT IN ('d','t','v','w','x','y','1')) + AND NOT public.export_ids_has_copy(bre.id); + +}; + +@results = run_query($query); + +@invalid_ids = find_invalid_ids(\@results, \@export_candidates); +push(@total_invalid_ids, "\n998 entries \n"); +push(@total_invalid_ids, @invalid_ids); + +############################################################ +# Verify that export candidates does not have a 'd' as # +# byte 05 in the LDR field. This would be the sixth # +# character in the LDR field # +############################################################ + +print "\n\nQuerying for LDR does not have 'd' test cases ... \n"; + +$query = q{ + SELECT bre.id + FROM biblio.record_entry bre + JOIN metabib.real_full_rec mrfr + ON bre.id = mrfr.record + WHERE mrfr.tag ILIKE 'LDR' AND mrfr.value ~ '.....d'; + +}; + +@results = run_query($query); +@invalid_ids = find_invalid_ids(\@results, \@export_candidates); +push(@total_invalid_ids, "\nLDR entries \n"); +push(@total_invalid_ids, @invalid_ids); + +open OUTPUT, ">invalid_ids"; +foreach my $i (@total_invalid_ids) { + print OUTPUT "$i\n"; +} +close OUTPUT; + +sub find_invalid_ids { + my $first_list = shift; + my $second_list = shift; + my @invalids; + my %compare = (); + + print "Checking for invalid IDs ...\n"; + foreach my $i (@$first_list) { + $compare{$i} = 1; + } + foreach my $i (@$second_list) { + if(exists($compare{$i})) { + push(@invalids, $i); + } + } + print scalar @invalids . " invalid IDs found.\n"; + return @invalids; +} + +sub run_query { + my $query = shift; + my @results; + + my $dbh = connect_to_db(); + my $sth = $dbh->prepare($query); + $sth->execute(); + print "Found " . $sth->rows() . " test cases\n"; + print "Building test cases ...\n"; + while (my @row = $sth->fetchrow_array()) { + my ($id) = @row; + push(@results, $id); + } + $sth->finish(); + $dbh->disconnect(); + + return @results; +} + +# Grab DB information from local settings. Return connected db handle (or die) +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' 
with RaiseError=>1 +} diff --git a/KCLS/bs_files/export/generate_export_ids.pl b/KCLS/bs_files/export/generate_export_ids.pl new file mode 100644 index 0000000000..b862a65126 --- /dev/null +++ b/KCLS/bs_files/export/generate_export_ids.pl @@ -0,0 +1,129 @@ +#!/usr/bin/perl + +# Generates a list of record ids that should be exported base on the following and outputs them in a file +# called export_ids + +# For a record to be exported it MUST MEET ALL of the following conditions + +# 1.The record must have at least one LDR field and byte 05 of every instance of that field must not be ‘d’. +# AND +# 2. The record must have at least one 001 field and at least one of those fields should contain “oc” +# AND +# 3. The record must have at least one of the following fields 086 or 092 or 099 and every instance +# of each of these fields must not contain “ON ORDER” +# AND +# 4. The record must have items attached OR the record must have at least one 998 field with subfield d +# that has one of the following values ‘d’ or ‘t’ or ‘v’ or ‘w’ or ‘x’ or ‘y’ or ‘1’ +# AND +# 5. The records Cat Date must fall between two dates supplied by the user. + +# Cat Date is currently partially implimented in the sql, it needs to be called there and parameters need +# to be gathered in this script and passed in to the stored procedure public.export_generate_ids + +use strict; +use warnings; + +use DBI; +use OpenSRF::System; +use Time::HiRes; + +# Maybe lose this in final +use Data::Dumper; + +my $first_date; +my $second_date; + +if (scalar @ARGV != 0 and scalar @ARGV != 2) { + print "\nArguments are invalid. Must have either 0 or 2 arguments\n"; + exit 1; +} + +if(scalar @ARGV == 2) { + $first_date = $ARGV[0]; + $second_date = $ARGV[1]; + + if($first_date !~ /^\d{4}.\d{1,2}.\d{1,2}$/ or $second_date !~ /^\d{4}.\d{1,2}.\d{1,2}$/) { + print "\nArguments are invalid. Expecting either zero arguments or two of the form YYYY-DD-MM\n"; + exit 1; + } + $first_date = "DATE '" . $ARGV[0] . "'"; + $second_date = "DATE '" . $ARGV[1] . "'"; +} else { + $first_date = "NULL"; + $second_date = "NULL"; +} + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); + +# Make some space for output +print "\n\n"; + +my $start = Time::HiRes::gettimeofday(); + + +my $query = ' + BEGIN; + SET statement_timeout = 0; + COMMIT; + SELECT * + FROM public.export_generate_ids(' . $first_date . ',' . $second_date . ')'; + +my $dbh = connect_to_db(); + +print "Querying database for IDs .. \n"; +my $sth = $dbh->prepare($query); +$sth->execute(); +print "Found " . $sth->rows() . " IDs\n"; + +print "Building output file ...\n"; + +(my $sec,my $min,my $hour,my $mday,my $mon,my $year,my $wday,my $yday,my $isdst) = localtime(); +$mon++; +$year += 1900; +system("mkdir -p /var/KCLS_AUTH"); +open OUTPUT, ">/var/KCLS_AUTH/export_ids_" . $mon ."_" . $mday ."_" .$year; + +my @output; +while (my @row = $sth->fetchrow_array()) { + push(@output, @row); +} + +## Sort the output file to aid in manual testing +my @sorted_output = sort { $a <=> $b } @output; +foreach (@sorted_output) { + print OUTPUT "$_\n"; +} + +my $end = Time::HiRes::gettimeofday(); +printf("Generated %d IDs in %.2f seconds\n", $sth->rows(),$end - $start); + +close OUTPUT; +$sth->finish(); +$dbh->disconnect(); + +exit 0; +# Grab DB information from local settings. 
Return connected db handle (or die) +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' with RaiseError=>1 +} diff --git a/KCLS/bs_files/export/generate_test_bib_ids.pl b/KCLS/bs_files/export/generate_test_bib_ids.pl new file mode 100755 index 0000000000..d7ef6a49ca --- /dev/null +++ b/KCLS/bs_files/export/generate_test_bib_ids.pl @@ -0,0 +1,76 @@ +#!/usr/bin/perl +# This script is used to generate a list of bib record ids that match the export criteria. +# It takes a file as input and verifies that those records meet ther criteria and then generates +# 25,000 additional bibs to supliment them. +# +# Usage cat ID_FILE | perl generate_test_bib_ids.pl +# +# Results will be stored in the file new_bibs + +use strict; +use warnings; + +use DBI; +use OpenSRF::System; + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); + +my @bibs; +while(<>) { + chomp; + push(@bibs, $_); +} +my $dbh = connect_to_db(); +my $input_size = scalar @bibs; +my $bibs_string = join(',',@bibs); + +print "\n\nVerifying selected bibs meet export criteria.\n"; +my $query = ' + SELECT id + FROM biblio.record_entry + WHERE public.export_ids_001(id) AND public.export_ids_086_092_099(id) AND public.export_ids_LDR(id) + AND public.export_ids_998(id) AND id IN (' . $bibs_string . ')'; + +my $sth = $dbh->prepare($query); +$sth->execute(); +print "\nVerified " . $input_size . " bibs.\n"; +print $sth->rows() . " bibs passed verification.\n"; + +open OUTPUT, ">new_bibs"; +my @output; +while (my @row = $sth->fetchrow_array()) { + push(@output, @row); +} + +foreach (@output) { + print OUTPUT "$_\n"; +} +close OUTPUT; + +$sth->finish(); +$dbh->disconnect(); + +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." 
if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' with RaiseError=>1 +} diff --git a/KCLS/bs_files/export/marc_export b/KCLS/bs_files/export/marc_export new file mode 100755 index 0000000000..f7ee1184d2 --- /dev/null +++ b/KCLS/bs_files/export/marc_export @@ -0,0 +1,425 @@ +#!/usr/bin/perl +# vim:et:sw=4:ts=4: +use strict; +use warnings; +use bytes; + +use OpenSRF::System; +use OpenSRF::EX qw/:try/; +use OpenSRF::AppSession; +use OpenSRF::Utils::JSON; +use OpenSRF::Utils::SettingsClient; +use OpenILS::Application::AppUtils; +use OpenILS::Utils::Fieldmapper; +use OpenILS::Utils::CStoreEditor; + +use MARC::Record; +use MARC::File::XML ( BinaryEncoding => 'UTF-8' ); +use UNIVERSAL::require; + +use Time::HiRes qw/time/; +use Getopt::Long; + + +my @formats = qw/USMARC UNIMARC XML BRE ARE/; + +my $config = '/openils/conf/opensrf_core.xml'; +my $format = 'USMARC'; +my $encoding = 'MARC8'; +my $location = ''; +my $dollarsign = '$'; +my $idl = 0; +my $help = undef; +my $holdings = undef; +my $timeout = 0; +my $export_mfhd = undef; +my $type = 'biblio'; +my $all_records = undef; +my $replace_001 = undef; +my @library = (); + +GetOptions( + 'help' => \$help, + 'items' => \$holdings, + 'mfhd' => \$export_mfhd, + 'all' => \$all_records, + 'replace_001'=> \$replace_001, + 'location=s' => \$location, + 'money=s' => \$dollarsign, + 'config=s' => \$config, + 'format=s' => \$format, + 'type=s' => \$type, + 'xml-idl=s' => \$idl, + 'encoding=s' => \$encoding, + 'timeout=i' => \$timeout, + 'library=s' => \@library, +); + +if ($help) { +print <<"HELP"; +This script exports MARC authority, bibliographic, and serial holdings +records from an Evergreen database. + +Input to this script can consist of a list of record IDs, with one record ID +per line, corresponding to the record ID in the Evergreen database table of +your requested record type. + +Alternately, passing the --all option will attempt to export all records of +the specified type from the Evergreen database. The --all option starts at +record ID 1 and increments the ID by 1 until the largest ID in the database +is retrieved. This may not be very efficient for databases with large gaps +in their ID sequences. + +Usage: $0 [options] + --help or -h This screen. + --config or -c Configuration file [/openils/conf/opensrf_core.xml] + --format or -f Output format (USMARC, UNIMARC, XML, BRE, ARE) [USMARC] + --encoding or -e Output encoding (UTF-8, ISO-8859-?, MARC8) [MARC8] + --xml-idl or -x Location of the IDL XML + --timeout Timeout for exporting a single record; increase if you + are using --holdings and are exporting records that + have a lot of items attached to them. 
+ --type or -t Record type (BIBLIO, AUTHORITY) [BIBLIO] + --all or -a Export all records; ignores input list + --library Export the bibliographic records that have attached + holdings for the listed library or libraries as + identified by shortname + --replace_001 Replace the 001 field value with the record ID + + Additional options for type = 'BIBLIO': + --items or -i Include items (holdings) in the output + --money Currency symbol to use in item price field [\$] + --mfhd Export serial MFHD records for associated bib records + Not compatible with --format=BRE + --location or -l MARC Location Code for holdings from + http://www.loc.gov/marc/organizations/orgshome.html + +Examples: + +To export a set of USMARC records in a file named "output_file" based on the +IDs contained in a file named "list_of_ids": + cat list_of_ids | $0 > output_file + +To export a set of MARC21XML authority records in a file named "output.xml" +for all authority records in the database: + $0 --format XML --type AUTHORITY --all > output.xml + +To export a set of USMARC bibliographic records encoded in UTF-8 in a file +named "sys1_bibs.mrc" based on records which have attached callnumbers for the +libraries with the short names "BR1" and "BR2": + + $0 --library BR1 --library BR2 --encoding UTF-8 > sys1_bibs.mrc + +HELP + exit; +} + +if ($all_records && @library) { + die('Incompatible arguments: you cannot combine a request for all ' . + 'records with a request for records by library'); +} + +$type = lc($type); +$format = uc($format); +$encoding = uc($encoding); + +binmode(STDOUT, ':raw') if ($encoding ne 'UTF-8'); +binmode(STDOUT, ':utf8') if ($encoding eq 'UTF-8'); + +if (!grep { $format eq $_ } @formats) { + die "Please select a supported format. ". + "Right now that means one of [". + join('|',@formats). "]\n"; +} + +if ($format ne 'XML') { + my $type = 'MARC::File::' . $format; + $type->require; +} + +if ($timeout <= 0) { + # set default timeout and/or correct silly user who + # supplied a negative timeout; default timeout of + # 300 seconds if exporting items determined empirically. + $timeout = $holdings ? 300 : 1; +} + +OpenSRF::System->bootstrap_client( config_file => $config ); + +if (!$idl) { + $idl = OpenSRF::Utils::SettingsClient->new->config_value("IDL"); +} + +Fieldmapper->import(IDL => $idl); + +my $ses = OpenSRF::AppSession->create('open-ils.cstore'); +OpenILS::Utils::CStoreEditor::init(); +my $editor = OpenILS::Utils::CStoreEditor->new(); + +print <
+ +HEADER + +my %orgs; +my %shelves; + +my $flesh = {}; + +if ($holdings) { + get_bib_locations(); +} + +my $start = time; +my $last_time = time; +my %count = ('bib' => 0, 'did' => 0); +my $speed = 0; + +if ($all_records) { + my $top_record = 0; + if ($type eq 'biblio') { + $top_record = $editor->search_biblio_record_entry([ + {deleted => 'f'}, + {order_by => { 'bre' => 'id DESC' }, limit => 1} + ])->[0]->id; + } elsif ($type eq 'authority') { + $top_record = $editor->search_authority_record_entry([ + {deleted => 'f'}, + {order_by => { 'are' => 'id DESC' }, limit => 1} + ])->[0]->id; + } + for (my $i = 0; $i++ < $top_record;) { + export_record($i); + } +} elsif (@library) { + my $recids = $editor->json_query({ + select => { bre => ['id'] }, + from => { bre => 'acn' }, + where => { + '+bre' => { deleted => 'f' }, + '+acn' => { + deleted => 'f', + owning_lib => { + in => { + select => {'aou' => ['id'] }, + from => 'aou', + where => { shortname => { in => \@library } } + } + } + } + }, + distinct => 1, + order_by => [{ + class => 'bre', + field => 'id', + direction => 'ASC' + }] + }); + + foreach my $record (@$recids) { + export_record($record->{id}); + }; +} else { + while ( my $i = <> ) { + export_record($i); + } +} +print "\n" if ($format eq 'XML'); + +$speed = $count{did} / (time - $start); +my $time = time - $start; +print STDERR <request( "open-ils.cstore.direct.$type.record_entry.retrieve", $id, $flesh ); + my $s = $r->recv(timeout => $timeout); + if (!$s) { + warn "\n!!!!! Failed trying to read record $id\n"; + return; + } + if ($r->failed) { + warn "\n!!!!!! Failed trying to read record $id: " . $r->failed->stringify . "\n"; + return; + } + if ($r->timed_out) { + warn "\n!!!!!! Timed out trying to read record $id\n"; + return; + } + $bib = $s->content; + $r->finish; + + $count{bib}++; + return unless $bib; + + if ($format eq 'ARE' or $format eq 'BRE') { + print OpenSRF::Utils::JSON->perl2JSON($bib); + stats(); + $count{did}++; + return; + } + + try { + + my $r = MARC::Record->new_from_xml( $bib->marc, $encoding, $format ); + if ($type eq 'biblio') { + add_bib_holdings($bib, $r); + } + + if ($replace_001) { + my $tcn = $r->field('001'); + if ($tcn) { + $tcn->update($id); + } else { + my $new_001 = MARC::Field->new('001', $id); + $r->insert_fields_ordered($new_001); + } + } + + if ($format eq 'XML') { + my $xml = $r->as_xml_record; + $xml =~ s/^<\?.+?\?>$//mo; + print $xml; + } elsif ($format eq 'UNIMARC') { + print $r->as_usmarc; + } elsif ($format eq 'USMARC') { + print $r->as_usmarc; + } + + $count{did}++; + + } otherwise { + my $e = shift; + warn "\n$e\n"; + import MARC::File::XML; # reset SAX parser so that one bad record doesn't kill the entire export + }; + + if ($export_mfhd and $type eq 'biblio') { + my $mfhds = $editor->search_serial_record_entry({record => $id, deleted => 'f'}); + foreach my $mfhd (@$mfhds) { + try { + my $r = MARC::Record->new_from_xml( $mfhd->marc, $encoding, $format ); + + if ($format eq 'XML') { + my $xml = $r->as_xml_record; + $xml =~ s/^<\?.+?\?>$//mo; + print $xml; + } elsif ($format eq 'UNIMARC') { + print $r->as_usmarc; + } elsif ($format eq 'USMARC') { + print $r->as_usmarc; + } + } otherwise { + my $e = shift; + warn "\n$e\n"; + import MARC::File::XML; # reset SAX parser so that one bad record doesn't kill the entire export + }; + } + } + + stats() if (! 
($count{bib} % 50 )); +} + +sub stats { + try { + no warnings; + + $speed = $count{did} / (time - $start); + + my $speed_now = ($count{did} - $count{did_last}) / (time - $count{time_last}); + my $cn_speed = $count{cn} / (time - $start); + my $cp_speed = $count{cp} / (time - $start); + + printf STDERR "\r $count{did} of $count{bib} @ \%0.4f/s ttl / \%0.4f/s rt ". + "($count{cn} CNs @ \%0.4f/s :: $count{cp} CPs @ \%0.4f/s)\r", + $speed, + $speed_now, + $cn_speed, + $cp_speed; + } otherwise {}; + $count{did_last} = $count{did}; + $count{time_last} = time; +} + +sub get_bib_locations { + print STDERR "Retrieving Org Units ... "; + my $r = $ses->request( 'open-ils.cstore.direct.actor.org_unit.search', { id => { '!=' => undef } } ); + + while (my $o = $r->recv) { + die $r->failed->stringify if ($r->failed); + $o = $o->content; + last unless ($o); + $orgs{$o->id} = $o; + } + $r->finish; + print STDERR "OK\n"; + + print STDERR "Retrieving Shelving locations ... "; + $r = $ses->request( 'open-ils.cstore.direct.asset.copy_location.search', { id => { '!=' => undef } } ); + + while (my $s = $r->recv) { + die $r->failed->stringify if ($r->failed); + $s = $s->content; + last unless ($s); + $shelves{$s->id} = $s; + } + $r->finish; + print STDERR "OK\n"; + + $flesh = { flesh => 2, flesh_fields => { bre => [ 'call_numbers' ], acn => [ 'copies' ] } }; +} + +sub add_bib_holdings { + my $bib = shift; + my $r = shift; + + my $cn_list = $bib->call_numbers; + if ($cn_list && @$cn_list) { + $cn_list = [ grep { $_->deleted eq 'f' } @$cn_list ]; + $count{cn} += @$cn_list; + + my $cp_list = [ grep { $_->deleted eq 'f' } map { @{ $_->copies } } @$cn_list ]; + if ($cp_list && @$cp_list) { + + my %cn_map; + push @{$cn_map{$_->call_number}}, $_ for (@$cp_list); + + for my $cn ( @$cn_list ) { + my $cn_map_list = $cn_map{$cn->id}; + + for my $cp ( @$cn_map_list ) { + $count{cp}++; + + $r->insert_grouped_field( MARC::Field->new( '852', '4', ' ', + ($location ? ( 'a' => $location ) : ()), + b => $orgs{$cn->owning_lib}->shortname, + b => $orgs{$cp->circ_lib}->shortname, + c => $shelves{$cp->location}->name, + j => $cn->label, + ($cp->circ_modifier ? ( g => $cp->circ_modifier ) : ()), + p => $cp->barcode, + ($cp->price ? ( y => $dollarsign.$cp->price ) : ()), + ($cp->copy_number ? ( t => $cp->copy_number ) : ()), + ($cp->ref eq 't' ? ( x => 'reference' ) : ()), + ($cp->holdable eq 'f' ? ( x => 'unholdable' ) : ()), + ($cp->circulate eq 'f' ? ( x => 'noncirculating' ) : ()), + ($cp->opac_visible eq 'f' ? ( x => 'hidden' ) : ()), + ) + ); + + stats() if (! ($count{cp} % 100 )); + } + } + } + } +} diff --git a/KCLS/bs_files/export/set_cat_date.pl b/KCLS/bs_files/export/set_cat_date.pl new file mode 100755 index 0000000000..fb74baa73a --- /dev/null +++ b/KCLS/bs_files/export/set_cat_date.pl @@ -0,0 +1,94 @@ +#!/usr/bin/perl + +# This script sets the cataloging date for each bib in the input file. +# It takes two parameters +# --file the file containing the list of bibs +# --date This is the cat date to set. 
If this is omitted then cat date will be set to NULL
+#
+# Usage: perl set_cat_date.pl --file file_of_bibs --date YYYY-MM-DD
+
+
+use strict;
+use warnings;
+
+use OpenSRF::System;
+use DBI;
+use Getopt::Long;
+use Data::Dumper;
+
+my $file;
+my $cat_date;
+my %options;
+my $result = GetOptions(
+    \%options,
+    'file=s' => \$file,
+    'date=s' => \$cat_date,
+);
+
+if(!$file) {
+    print "Must specify file of bib ids.\n";
+    exit;
+}
+
+my $config = '/openils/conf/opensrf_core.xml';
+OpenSRF::System->bootstrap_client( config_file => $config );
+
+# Read the file
+my @records;
+open FILE, "<", $file or die "Can't open file " . $file;
+print "\nReading file " . $file . "\n";
+while (<FILE>) {
+    chomp;
+    if($_) {
+        push(@records, $_);
+    }
+}
+close FILE;
+
+# Build the base query
+my $base_query = "UPDATE biblio.record_entry SET cataloging_date = ";
+if(!$cat_date) {
+    $base_query = $base_query . "NULL";
+    print "\nSetting cat date to NULL\n";
+} else {
+    $base_query = $base_query . "'" . $cat_date . "'";
+    print "\nSetting cat date to " . $cat_date . "\n";
+}
+
+# Connect to DB and execute query
+my $dbh = connect_to_db();
+print "\nUpdating rows.\n";
+foreach(@records) {
+    my $query = $base_query . " WHERE id = " . $_;
+    my $sth = $dbh->prepare($query);
+    $sth->execute();
+    $sth->finish();
+}
+$dbh->disconnect();
+
+print "Updated " . scalar(@records) . " rows.\n";
+
+# Grab DB information from local settings. Return connected db handle (or die)
+sub connect_to_db {
+    my $sc = OpenSRF::Utils::SettingsClient->new;
+    my $db_driver = $sc->config_value( reporter => setup => database => 'driver' );
+    my $db_host = $sc->config_value( reporter => setup => database => 'host' );
+    my $db_port = $sc->config_value( reporter => setup => database => 'port' );
+    my $db_name = $sc->config_value( reporter => setup => database => 'db' );
+    if (!$db_name) {
+        $db_name = $sc->config_value( reporter => setup => database => 'name' );
+        print STDERR "WARN: <database><name> is a deprecated setting for database name. For future compatibility, you should use <database><db> instead." if $db_name;
+    }
+    my $db_user = $sc->config_value( reporter => setup => database => 'user' );
+    my $db_pw = $sc->config_value( reporter => setup => database => 'pw' );
+
+    die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user);
+
+    my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port;
+
+    return DBI->connect(
+        $dsn,$db_user,$db_pw, {
+            AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1
+        }
+    ); # shouldn't need 'or die...'
with RaiseError=>1 +} diff --git a/KCLS/bs_files/get_vandelay_merge_profile.sql b/KCLS/bs_files/get_vandelay_merge_profile.sql new file mode 100644 index 0000000000..89bfd686a1 --- /dev/null +++ b/KCLS/bs_files/get_vandelay_merge_profile.sql @@ -0,0 +1 @@ +SELECT id AS merge_profile FROM vandelay.merge_profile WHERE name LIKE 'Backstage Preserve Import' LIMIT 1; diff --git a/KCLS/bs_files/get_vandelay_queue.sql b/KCLS/bs_files/get_vandelay_queue.sql new file mode 100644 index 0000000000..42fff10e23 --- /dev/null +++ b/KCLS/bs_files/get_vandelay_queue.sql @@ -0,0 +1 @@ +SELECT id AS queue FROM vandelay.queue WHERE name LIKE 'unupdated_bib_because_modified_since_export' LIMIT 1; diff --git a/KCLS/bs_files/importAuths.sh b/KCLS/bs_files/importAuths.sh new file mode 100755 index 0000000000..4d8ffd47bb --- /dev/null +++ b/KCLS/bs_files/importAuths.sh @@ -0,0 +1,67 @@ +#!/bin/bash +# This script sets the config flags for reingest to skip metarecord, perform the import, and then reset the config flags. + +function timer() +{ + if [[ $# -eq 0 ]]; then + echo $(date '+%s') + else + local stime=$1 + etime=$(date '+%s') + + if [[ -z "$stime" ]]; then stime=$etime; fi + + dt=$((etime - stime)) + ds=$((dt % 60)) + dm=$(((dt / 60) % 60)) + dh=$((dt / 3600)) + printf '%d:%02d:%02d' $dh $dm $ds + fi +} + +if [ "$1" = "-h" ]; then + HNAME=$2 +else + echo Please enter hostname with -h "hostname" + return +fi + +if [ "$3" = "-p" ]; then + PORT=$4 +else + echo Please enter port with -p "port" + return +fi + +if [ "$5" = "-d" ]; then + DIRECTORY=$6 +else + echo Please enter directory name with -d "directory" + return +fi + +if [ "$7" = "-f" ]; then + FINISHEDDIRECTORY=$8 +else + echo Please enter finished directory name with -f "finished directory" + return +fi + +if [ "$9" = "-w" ]; then + PASSWORD=${10} +else + echo Please enter the database password with -w "password" +fi + +echo hostname is $HNAME and port is $PORT '\n' +tmr=$(timer) + +# Get the repo root directory +REPO_ROOT=$(git rev-parse --show-toplevel) + +date + +perl $REPO_ROOT/bs_files/update_driver.pl --dir $DIRECTORY --finished-dir $FINISHEDDIRECTORY --database evergreen --host $HNAME --port $PORT --user evergreen --password $PASSWORD + +date +printf 'Elapsed time: %s\n' $(timer $tmr) diff --git a/KCLS/bs_files/importBibs.sh b/KCLS/bs_files/importBibs.sh new file mode 100755 index 0000000000..0243bfe405 --- /dev/null +++ b/KCLS/bs_files/importBibs.sh @@ -0,0 +1,75 @@ +#!/bin/bash + +# This script sets the config flags for reingest to skip metarecord, perform the import, and then reset the config flags. 
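#
# The option flags are read positionally ($1, $3, $5, $7, $9), so they must be
# supplied in exactly the order shown below. A hypothetical invocation, with
# placeholder hostname, port, directories, and password (none of these values
# come from this patch):
#
#   ./importBibs.sh -h db.example.org -p 5432 \
#       -d /var/KCLS_AUTH/incoming -f /var/KCLS_AUTH/finished -w secret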
+ +function timer() +{ + if [[ $# -eq 0 ]]; then + echo $(date '+%s') + else + local stime=$1 + etime=$(date '+%s') + + if [[ -z "$stime" ]]; then stime=$etime; fi + + dt=$((etime - stime)) + ds=$((dt % 60)) + dm=$(((dt / 60) % 60)) + dh=$((dt / 3600)) + printf '%d:%02d:%02d' $dh $dm $ds + fi +} + +if [ "$1" = "-h" ]; then + HNAME=$2 +else + echo Please enter hostname with -h "hostname" + return +fi + +if [ "$3" = "-p" ]; then + PORT=$4 +else + echo Please enter port with -p "port" + return +fi + +if [ "$5" = "-d" ]; then + DIRECTORY=$6 +else + echo Please enter directory name with -d "directory" + return +fi + +if [ "$7" = "-f" ]; then + FINISHEDDIRECTORY=$8 +else + echo Please enter finished directory name with -f "finished directory" + return +fi + +if [ "$9" = "-w" ]; then + PASSWORD=${10} +else + echo Please enter the database password with -w "password" +fi + +echo hostname is $HNAME and port is $PORT '\n' +tmr=$(timer) + +# Get the repo root directory +REPO_ROOT=$(git rev-parse --show-toplevel) + +# Set config flags to force a reingest even if the marc is not changed +date +psql -d evergreen -h $HNAME -U evergreen -f $REPO_ROOT/bs_files/import_workflow/before_bib_import.sql -p $PORT + +date +perl $REPO_ROOT/bs_files/update_driver.pl --dir $DIRECTORY --finished-dir $FINISHEDDIRECTORY --database evergreen --host $HNAME --port $PORT --user evergreen --password $PASSWORD + +# Set config flags back to original state +date +psql -d evergreen -h $HNAME -U evergreen -f $REPO_ROOT/bs_files/import_workflow/after_bib_import.sql -p $PORT + +date +printf 'Elapsed time: %s\n' $(timer $tmr) diff --git a/KCLS/bs_files/import_workflow/after_bib_import.sql b/KCLS/bs_files/import_workflow/after_bib_import.sql new file mode 100644 index 0000000000..19833e4ff1 --- /dev/null +++ b/KCLS/bs_files/import_workflow/after_bib_import.sql @@ -0,0 +1,5 @@ +BEGIN; + +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.reingest.force_on_same_marc'; + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/after_metarecord_remapping.sql b/KCLS/bs_files/import_workflow/after_metarecord_remapping.sql new file mode 100644 index 0000000000..56e6813e77 --- /dev/null +++ b/KCLS/bs_files/import_workflow/after_metarecord_remapping.sql @@ -0,0 +1,11 @@ + +BEGIN; + +-- We need to skip most of the biblio.indexing_ingest_or_delete function that was already run with the bib import. +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.disable_authority_linking'; +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.disable_metabib_full_rec'; +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.disable_metabib_rec_descriptor'; +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.disable_located_uri'; +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.metarecord_mapping.skip_on_update'; + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/after_threaded_auth_import.sql b/KCLS/bs_files/import_workflow/after_threaded_auth_import.sql new file mode 100644 index 0000000000..3c8199864f --- /dev/null +++ b/KCLS/bs_files/import_workflow/after_threaded_auth_import.sql @@ -0,0 +1,15 @@ + +BEGIN; +-- Add back unique constraints on the metabib.browse_XXX_entry tables that were removed to avoid a deadlock issue. 
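--
-- These are the same five constraints that before_threaded_auth_import.sql
-- drops; the ALTER TABLE statements below put them back once the threaded
-- authority import has finished. As an illustrative sanity check (an example
-- query, not something this workflow runs), the restored constraints can be
-- listed afterwards with:
--   SELECT conname FROM pg_constraint WHERE conname LIKE '%_sort_value_value_key';
--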
+ALTER TABLE IF EXISTS metabib.browse_author_entry + ADD CONSTRAINT browse_author_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_series_entry + ADD CONSTRAINT browse_series_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_subject_entry + ADD CONSTRAINT browse_subject_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_title_entry + ADD CONSTRAINT browse_title_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_call_number_entry + ADD CONSTRAINT browse_call_number_entry_sort_value_value_key UNIQUE(sort_value); + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/after_threaded_bib_import.sql b/KCLS/bs_files/import_workflow/after_threaded_bib_import.sql new file mode 100644 index 0000000000..ecba2302d7 --- /dev/null +++ b/KCLS/bs_files/import_workflow/after_threaded_bib_import.sql @@ -0,0 +1,22 @@ + +BEGIN; +-- metarecord_mapping is skipped because it was causing a deadlock on the threaded bib import +-- This command disables the flag to not skip metarecord_mapping +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.metarecord_mapping.skip_on_update'; + +COMMIT; + +BEGIN; +-- Add back unique constraints on the metabib.browse_XXX_entry tables that were removed to avoid a deadlock issue. +ALTER TABLE IF EXISTS metabib.browse_author_entry + ADD CONSTRAINT browse_author_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_series_entry + ADD CONSTRAINT browse_series_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_subject_entry + ADD CONSTRAINT browse_subject_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_title_entry + ADD CONSTRAINT browse_title_entry_sort_value_value_key UNIQUE(sort_value); +ALTER TABLE IF EXISTS metabib.browse_call_number_entry + ADD CONSTRAINT browse_call_number_entry_sort_value_value_key UNIQUE(sort_value); + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/auth_re_trg_create.sql b/KCLS/bs_files/import_workflow/auth_re_trg_create.sql new file mode 100644 index 0000000000..3b9af49705 --- /dev/null +++ b/KCLS/bs_files/import_workflow/auth_re_trg_create.sql @@ -0,0 +1,29 @@ +CREATE TRIGGER a_marcxml_is_well_formed + BEFORE INSERT OR UPDATE + ON authority.record_entry + FOR EACH ROW + EXECUTE PROCEDURE biblio.check_marcxml_well_formed(); + +--CREATE TRIGGER aaa_auth_ingest_or_delete +-- AFTER INSERT OR UPDATE +-- ON authority.record_entry +-- FOR EACH ROW +-- EXECUTE PROCEDURE authority.indexing_ingest_or_delete(); + +CREATE TRIGGER b_maintain_901 + BEFORE INSERT OR UPDATE + ON authority.record_entry + FOR EACH ROW + EXECUTE PROCEDURE maintain_901(); + +CREATE TRIGGER c_maintain_control_numbers + BEFORE INSERT OR UPDATE + ON authority.record_entry + FOR EACH ROW + EXECUTE PROCEDURE public.maintain_control_numbers(); + +CREATE TRIGGER map_thesaurus_to_control_set + BEFORE INSERT OR UPDATE + ON authority.record_entry + FOR EACH ROW + EXECUTE PROCEDURE authority.map_thesaurus_to_control_set(); diff --git a/KCLS/bs_files/import_workflow/auth_re_trg_drop.sql b/KCLS/bs_files/import_workflow/auth_re_trg_drop.sql new file mode 100644 index 0000000000..42f28acb90 --- /dev/null +++ b/KCLS/bs_files/import_workflow/auth_re_trg_drop.sql @@ -0,0 +1,11 @@ +DROP TRIGGER a_marcxml_is_well_formed ON authority.record_entry; + +--DROP TRIGGER aaa_auth_ingest_or_delete ON authority.record_entry; + +DROP TRIGGER b_maintain_901 ON 
authority.record_entry; + +DROP TRIGGER c_maintain_control_numbers ON authority.record_entry; + +DROP TRIGGER map_thesaurus_to_control_set ON authority.record_entry; + + diff --git a/KCLS/bs_files/import_workflow/before_bib_import.sql b/KCLS/bs_files/import_workflow/before_bib_import.sql new file mode 100644 index 0000000000..a82020a197 --- /dev/null +++ b/KCLS/bs_files/import_workflow/before_bib_import.sql @@ -0,0 +1,5 @@ +BEGIN; + +UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.reingest.force_on_same_marc'; + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/before_metarecord_remapping.sql b/KCLS/bs_files/import_workflow/before_metarecord_remapping.sql new file mode 100644 index 0000000000..374bc88482 --- /dev/null +++ b/KCLS/bs_files/import_workflow/before_metarecord_remapping.sql @@ -0,0 +1,11 @@ + +BEGIN; + +-- We need to skip most of the biblio.indexing_ingest_or_delete function that was already run with the bib import. +UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.disable_authority_linking'; +UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.disable_metabib_full_rec'; +UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.disable_metabib_rec_descriptor'; +UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.disable_located_uri'; +UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.metarecord_mapping.skip_on_update'; + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/before_threaded_auth_import.sql b/KCLS/bs_files/import_workflow/before_threaded_auth_import.sql new file mode 100644 index 0000000000..e4b0be19db --- /dev/null +++ b/KCLS/bs_files/import_workflow/before_threaded_auth_import.sql @@ -0,0 +1,16 @@ + +BEGIN; + +-- Drop unique constraints on the metabib.browse_XXX_entry tables to avoid a deadlock issue. +ALTER TABLE IF EXISTS metabib.browse_author_entry + DROP CONSTRAINT browse_author_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_subject_entry + DROP CONSTRAINT browse_subject_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_title_entry + DROP CONSTRAINT browse_title_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_series_entry + DROP CONSTRAINT browse_series_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_call_number_entry + DROP CONSTRAINT browse_call_number_entry_sort_value_value_key; + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/before_threaded_bib_import.sql b/KCLS/bs_files/import_workflow/before_threaded_bib_import.sql new file mode 100644 index 0000000000..2f63fe89a4 --- /dev/null +++ b/KCLS/bs_files/import_workflow/before_threaded_bib_import.sql @@ -0,0 +1,23 @@ + +BEGIN; +-- metarecord_mapping is skipped because it was causing a deadlock on the threaded bib import +-- This command sets the flag to skip metarecord_mapping +UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.metarecord_mapping.skip_on_update'; + +COMMIT; + +BEGIN; + +-- Drop unique constraints on the metabib.browse_XXX_entry tables to avoid a deadlock issue. 
+ALTER TABLE IF EXISTS metabib.browse_author_entry + DROP CONSTRAINT browse_author_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_subject_entry + DROP CONSTRAINT browse_subject_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_title_entry + DROP CONSTRAINT browse_title_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_series_entry + DROP CONSTRAINT browse_series_entry_sort_value_value_key; +ALTER TABLE IF EXISTS metabib.browse_call_number_entry + DROP CONSTRAINT browse_call_number_entry_sort_value_value_key; + +COMMIT; diff --git a/KCLS/bs_files/import_workflow/biblio_re_trg_create.sql b/KCLS/bs_files/import_workflow/biblio_re_trg_create.sql new file mode 100644 index 0000000000..2a04c7118d --- /dev/null +++ b/KCLS/bs_files/import_workflow/biblio_re_trg_create.sql @@ -0,0 +1,57 @@ +--CREATE triggers on biblio.record_entry + +CREATE TRIGGER a_marcxml_is_well_formed + BEFORE INSERT OR UPDATE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE biblio.check_marcxml_well_formed(); + +CREATE TRIGGER a_opac_vis_mat_view_tgr + AFTER INSERT OR UPDATE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE asset.cache_copy_visibility(); + +CREATE TRIGGER aaa_indexing_ingest_or_delete + AFTER INSERT OR UPDATE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE biblio.indexing_ingest_or_delete(); + +ALTER TABLE metabib.browse_entry ADD CONSTRAINT browse_entry_sort_value_value_key UNIQUE(sort_value, value); + +CREATE TRIGGER audit_biblio_record_entry_update_trigger + AFTER UPDATE OR DELETE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE auditor.audit_biblio_record_entry_func(); + +CREATE TRIGGER b_maintain_901 + BEFORE INSERT OR UPDATE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE maintain_901(); + +CREATE TRIGGER bbb_simple_rec_trigger + AFTER INSERT OR UPDATE OR DELETE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE reporter.simple_rec_trigger(); + +CREATE TRIGGER c_maintain_control_numbers + BEFORE INSERT OR UPDATE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE public.maintain_control_numbers(); + +CREATE TRIGGER fingerprint_tgr + BEFORE INSERT OR UPDATE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE biblio.fingerprint_trigger('eng', 'BKS'); + +CREATE TRIGGER language_filter_trigger + AFTER INSERT OR UPDATE + ON biblio.record_entry + FOR EACH ROW + EXECUTE PROCEDURE biblio.update_language_filter(); diff --git a/KCLS/bs_files/import_workflow/biblio_re_trg_drop.sql b/KCLS/bs_files/import_workflow/biblio_re_trg_drop.sql new file mode 100644 index 0000000000..756c50a27a --- /dev/null +++ b/KCLS/bs_files/import_workflow/biblio_re_trg_drop.sql @@ -0,0 +1,11 @@ +--DROP Script for triggers on biblio.record_entry +DROP TRIGGER a_marcxml_is_well_formed ON biblio.record_entry; +DROP TRIGGER a_opac_vis_mat_view_tgr ON biblio.record_entry; +DROP TRIGGER aaa_indexing_ingest_or_delete ON biblio.record_entry; +ALTER TABLE metabib.browse_entry DROP CONSTRAINT browse_entry_sort_value_value_key; +DROP TRIGGER audit_biblio_record_entry_update_trigger ON biblio.record_entry; +DROP TRIGGER b_maintain_901 ON biblio.record_entry; +DROP TRIGGER bbb_simple_rec_trigger ON biblio.record_entry; +DROP TRIGGER c_maintain_control_numbers ON biblio.record_entry; +DROP TRIGGER fingerprint_tgr ON biblio.record_entry; +DROP TRIGGER language_filter_trigger ON biblio.record_entry; diff --git a/KCLS/bs_files/import_workflow/import_workflow_readme.txt 
b/KCLS/bs_files/import_workflow/import_workflow_readme.txt new file mode 100644 index 0000000000..43be7ecf8e --- /dev/null +++ b/KCLS/bs_files/import_workflow/import_workflow_readme.txt @@ -0,0 +1,97 @@
+FileName: import_workflow_readme
+Description: This document outlines the workflow for batch import of updated bib records and auth records.
+**********************************************************************************************************************************************************
+Follow these directions if you already have a zip file from Backstage.
+
+1. Move the folder Backstage from this directory to the perl directory,
+   /usr/lib/perl5/Backstage/
+
+   From within the bs_files/Backstage folder in the repo,
+   cp * /usr/lib/perl5/Backstage
+
+2. Make sure that MARC::Record is on version 2.0.3 and MARC::File::XML is on version 0.92
+   a. cpan -D MARC::Record
+   b. cpan -D MARC::File::XML
+
+   Extra: if your packages are the wrong versions,
+   a. cpan install G/GM/GMCHARLT/MARC-Record-2.0.3.tar.gz
+   b. cpan install G/GM/GMCHARLT/MARC-XML-0.92.tar.gz
+
+3. Make sure services are running. The machine you are running this from should have an Evergreen Apache server.
+
+4. Create the following directories if they do not already exist:
+   a. mkdir /var/KCLS_AUTH
+   b. mkdir /var/KCLS_AUTH/bibs_to_do
+   c. mkdir /var/KCLS_AUTH/done_bibs
+   d. mkdir /var/KCLS_AUTH/unupdated
+   e. mkdir /var/KCLS_AUTH/auths_to_do
+   f. mkdir /var/KCLS_AUTH/done_auths
+
+5. Modify /home/kclsdev/24kcls_evergreen/bs_files/sample.json
+   Specifically the evergreen section:
+
+   "evergreen":
+   {
+       "osrf_config":"/openils/conf/opensrf_core.xml",
+       "authentication":
+       {
+           "username" : "admin",
+           "password" : "winter15c0ming",
+           "workstation" : "RE-PC30128",
+           "type" : "Branch"
+       }
+   },
+
+   Change the username and password to match an admin Evergreen user and the workstation to match one that has been registered. For the workstation, use the one from your staff client.
+
+6. Generate the .sql scripts for the authority MARC record import by running BSLWimport.pl:
+
+   ./BSLWimport.pl -e 'date_string' -h 'hostname' -p 'port' sample.json name_of_zip_file_to_import 2>&1 | tee log_file_name_goes_here
+
+   example: ./BSLWimport.pl -e '2012-12-15' -h evergreentest.catalystitservices.com -p 5415 sample.json 10000.AUTH.MRC.zip 2>&1 | tee limited_bibs_test_20140420
+
+   Catalyst: added the option -e(xport) that takes a date string
+
+7. Import the auth records with one script. This script removes unique constraints on browse_entry tables, updates the auth records, and adds the unique constraints back on the browse_entry tables.
+   ./importAuths.sh -h hostname -p port -d sql_scripts_directory -f directory_for_finished_sql_scripts
+
+   example for 215: ./importAuths.sh -h evergreentest.catalystitservices.com -p 5415 -d /var/KCLS_AUTH/auths_to_do -f /var/KCLS_AUTH/done_auths
+
+8. Generate the .sql scripts for the bib MARC record import by running BSLWimport.pl:
+
+   ./BSLWimport.pl -e 'date_string' -h 'hostname' -p 'port' sample.json name_of_zip_file_to_import 2>&1 | tee log_file_name_goes_here
+
+   example: ./BSLWimport.pl -e '2014-05-12' -h evergreentest.catalystitservices.com -p 5415 sample.json S1402.SMPL.BIBS.zip 2>&1 | tee log_4_S1402.SMPL.BIBS.zip.log
+
+   Catalyst: added the option -e(xport) that takes a date string.
+   It creates a list of record_entry ids that have been modified since the date and excludes those records from the import scripts.
+   The updated MARC records from Backstage that are excluded are written to a file in /var/KCLS_AUTH/unupdated/ (unupdated_record_entries_yyyymmdd).
+
+9. Import the updated bib records with one script. This script sets the config flags to skip metarecord reingest, removes unique constraints on browse_entry tables, updates the bib records,
+   adds the unique constraints back on the browse_entry tables, and resets the config flags.
+   ./importBibs.sh -h hostname -p port -d sql_scripts_directory -f directory_for_finished_sql_scripts
+
+   example for 215: ./importBibs.sh -h evergreentest.catalystitservices.com -p 5415 -d /var/KCLS_AUTH/bibs_to_do -f /var/KCLS_AUTH/done_bibs
+
+10. Now we handle bib records that have been modified since the export.
+    The MARC records are in /var/KCLS_AUTH/unupdated/
+
+    We will use the script modified_since_export_record_importer.pl.
+
+    --user is the evergreen user
+    --password is the password for the user supplied
+    --queue is the id of the vandelay queue (id from vandelay.queued_bib_record)
+    --merge-profile is the id of the vandelay.merge_profile; we use the "Backstage Field Protection" profile
+
+    Get merge-profile: psql -U evergreen -h evergreentest.catalystitservices.com -p 5415 -d evergreen -f get_vandelay_merge_profile.sql
+
+    Get queue: psql -U evergreen -h evergreentest.catalystitservices.com -p 5415 -d evergreen -f get_vandelay_queue.sql
+
+    example: perl modified_since_export_record_importer.pl --user admin --password #### --queue #### --merge-profile ####
+
+11. We need to remap the metarecords after the update of bib records. Use the following shell script:
+    ./runMetarecordRemapping.sh -h hostname -p portnumber
+    example for 215: ./runMetarecordRemapping.sh -h evergreentest.catalystitservices.com -p 5415
+
+    NOTE: To run in dev test mode and only reingest the export set, use the following command instead: ./runMetarecordRemapping.sh -h hostname -p portnumber -dev full_path_to_export_file
\ No newline at end of file
diff --git a/KCLS/bs_files/import_workflow/remove_duplicate_browse_entries.sql b/KCLS/bs_files/import_workflow/remove_duplicate_browse_entries.sql new file mode 100644 index 0000000000..4bc62d5390 --- /dev/null +++ b/KCLS/bs_files/import_workflow/remove_duplicate_browse_entries.sql @@ -0,0 +1,7 @@
+-- This file runs the stored procedure metabib.remove_duplicate_browse_entries()
+BEGIN;
+    SET statement_timeout = 0;
+COMMIT;
+BEGIN;
+    SELECT * FROM metabib.remove_duplicate_browse_entries();
+COMMIT;
diff --git a/KCLS/bs_files/json.bre b/KCLS/bs_files/json.bre new file mode 100644 index 0000000000..e69de29bb2
diff --git a/KCLS/bs_files/marc_stream_importer.conf b/KCLS/bs_files/marc_stream_importer.conf new file mode 100644 index 0000000000..329b247aff --- /dev/null +++ b/KCLS/bs_files/marc_stream_importer.conf @@ -0,0 +1,28 @@
+#-------------- file marc_stream_importer.conf --------------
+
+### user and group to become
+user opensrf
+group opensrf
+
+### logging ?
+log_file /openils/var/log/marc_stream_importer.log +log_level 3 +pid_file /openils/var/run/marc_stream_importer.pid + +### access control +# allow .+\.(net|com) +# allow domain\.com +# deny a.+ + +### background the process? +# background 1 + +### ports to bind +# host 127.0.0.1 +# port localhost:20204 +port 5544 + +### reverse lookups ? +# reverse_lookups on + +#-------------- file marc_stream_importer.conf -------------- diff --git a/KCLS/bs_files/marc_stream_importer.pl b/KCLS/bs_files/marc_stream_importer.pl new file mode 100644 index 0000000000..a453fea5a9 --- /dev/null +++ b/KCLS/bs_files/marc_stream_importer.pl @@ -0,0 +1,590 @@ +#!/usr/bin/perl +# Copyright (C) 2008-2010 Equinox Software, Inc. +# Author: Bill Erickson +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# Sample command to run, perl marc_stream_importer.pl --spoolfile /home/kclsdev/24kcls_evergreen/bs_files/single_bre_1244460.mrc --user admin --password winter15c0ming --queue 1086 --merge-profile 4 --nodaemon + +use strict; use warnings; +use Net::Server::PreFork; +use base qw/Net::Server::PreFork/; +use MARC::Record; +use MARC::Batch; +use MARC::File::XML ( BinaryEncoding => 'UTF-8' ); +use MARC::File::USMARC; + +use Data::Dumper; +use File::Basename qw/fileparse/; +use File::Temp; +use Getopt::Long qw(:DEFAULT GetOptionsFromArray); +use Pod::Usage; +use Socket; + +use OpenSRF::Utils::Logger qw/$logger/; +use OpenSRF::AppSession; +use OpenSRF::EX qw/:try/; +use OpenILS::Utils::Cronscript; +use OpenSRF::Transport::PeerHandle; +require 'oils_header.pl'; +use vars qw/$apputils/; + +my $vl_ses; + +my $debug = 0; + +my %defaults = ( + 'buffsize=i' => 4096, + 'merge-profile=i' => 0, + 'source=i' => 1, +# 'osrf-config=s' => '/openils/conf/opensrf_core.xml', + 'user=s' => 'admin', + 'password=s' => '', + 'tempdir=s' => '', + 'spoolfile=s' => '', + 'nolockfile' => 1, + 'queue=i' => 1, + 'noqueue' => 0, + 'nodaemon' => 0, + 'wait=i' => 5, + 'import-by-queue' => 0 +); + +$OpenILS::Utils::Cronscript::debug=1 if $debug; +$Getopt::Long::debug=1 if $debug > 1; +my $o = OpenILS::Utils::Cronscript->new(\%defaults); + +my @script_args = (); + +if (grep {$_ eq '--'} @ARGV) { + print "Splitting options into groups\n" if $debug; + while (@ARGV) { + $_ = shift @ARGV; + $_ eq '--' and last; # stop at the first -- + push @script_args, $_; + } +} else { + @script_args = @ARGV; + @ARGV = (); +} + +print "Calling MyGetOptions ", + (@script_args ? "with options: " . 
join(' ', @script_args) : 'without options from command line'), + "\n" if $debug; + +my $real_opts = $o->MyGetOptions(\@script_args); +$o->bootstrap; +# GetOptionsFromArray(\@script_args, \%defaults, %defaults); # similar to + +$real_opts->{tempdir} ||= tempdir_setting(); # This doesn't go in defaults because it reads config, must come after bootstrap + +my $bufsize = $real_opts->{buffsize}; +my $bib_source = $real_opts->{source}; +my $osrf_config = $real_opts->{'osrf-config'}; +my $oils_username = $real_opts->{user}; +my $oils_password = $real_opts->{password}; +my $help = $real_opts->{help}; +my $merge_profile = $real_opts->{'merge-profile'}; +my $queue_id = $real_opts->{queue}; +my $tempdir = $real_opts->{tempdir}; +my $import_by_queue = $real_opts->{'import-by-queue'}; + $debug += $real_opts->{debug}; + +foreach (keys %$real_opts) { + print("real_opt->{$_} = ", $real_opts->{$_}, "\n") if $real_opts->{debug} or $debug; +} +my $wait_time = $real_opts->{wait}; +my $authtoken = ''; + +# DEFAULTS for Net::Server +my $filename = fileparse($0, '.pl'); +my $conf_file = (-r "$filename.conf") ? "$filename.conf" : undef; +# $conf_file is the Net::Server config for THIS script (not EG), if it exists and is readable + + +# FEEDBACK + +pod2usage(1) if $help; +unless ($oils_password) { + print STDERR "\nERROR: password option required for session login\n\n"; + # pod2usage(1); +} + +print Dumper($o) if $debug; + +if ($debug) { + foreach my $ref (qw/bufsize bib_source osrf_config oils_username oils_password help conf_file debug/) { + no strict 'refs'; + printf "%16s => %s\n", $ref, (eval("\$$ref") || ''); + } +} + +print warning(); +print Dumper($real_opts); + +# SUBS + +sub tempdir_setting { + my $ret = $apputils->simplereq( qw# opensrf.settings opensrf.settings.xpath.get + /opensrf/default/apps/open-ils.vandelay/app_settings/databases/importer # ); + return $ret->[0] || '/tmp'; +} + +sub warning { + return <simplereq( + 'open-ils.cat', + 'open-ils.cat.biblio.record.xml.import', + @_ + ); +} + +sub old_process_batch_data { + my $data = shift or $logger->error("process_batch_data called without any data"); + my $isfile = shift; + $data or return; + + my $handle; + if ($isfile) { + $handle = $data; + } else { + open $handle, '<', \$data; + } + + my $batch = MARC::Batch->new('USMARC', $handle); + $batch->strict_off; + + my $index = 0; + my $imported = 0; + my $failed = 0; + + while (1) { + my $rec; + $index++; + + eval { $rec = $batch->next; }; + + if ($@) { + $logger->error("Failed parsing MARC record $index"); + $failed++; + next; + } + last unless $rec; # The only way out + + my $resp = xml_import($authtoken, $rec->as_xml_record, $bib_source); + + # has the session timed out? 
+ if (oils_event_equals($resp, 'NO_SESSION')) { + new_auth_token(); + $resp = xml_import($authtoken, $rec->as_xml_record, $bib_source); # try again w/ new token + } + oils_event_die($resp); + $imported++; + } + + return ($imported, $failed); +} + +sub process_spool { # filename + + my $marcfile = shift; + my @rec_ids; + + if($import_by_queue) { + + # don't collect the record IDs, just spool the queue + + $apputils->simplereq( + 'open-ils.vandelay', + 'open-ils.vandelay.bib.process_spool', + $authtoken, + undef, + $queue_id, + 'import', + $marcfile, + $bib_source + ); + + } else { + + # collect the newly queued record IDs for processing + + my $req = $vl_ses->request( + 'open-ils.vandelay.bib.process_spool.stream_results', + $authtoken, + undef, # cache key not needed + $queue_id, + 'import', + $marcfile, + $bib_source + ); + + while(my $resp = $req->recv) { + + if($req->failed) { + $logger->error("Error spooling MARC data: $resp"); + + } elsif($resp->content) { + push(@rec_ids, $resp->content); + } + } + } + + return \@rec_ids; +} + +sub bib_queue_import { + my $rec_ids = shift; + my $extra = { + auto_overlay_exact => 1, + import_no_match => 1, + }; + $extra->{merge_profile} = $merge_profile if $merge_profile; + + my $req; + my @cleanup_recs; + + if($import_by_queue) { + # import by queue + + $req = $vl_ses->request( + 'open-ils.vandelay.bib_queue.import', + $authtoken, + $queue_id, + $extra + ); + + } else { + # import explicit record IDs + + $req = $vl_ses->request( + 'open-ils.vandelay.bib_record.list.import', + $authtoken, + $rec_ids, + $extra + ); + } + + # collect the successfully imported vandelay records + my $failed = 0; + while(my $resp = $req->recv) { + if($req->failed) { + $logger->error("Error importing MARC data: $resp"); + + } elsif(my $data = $resp->content) { + + if($data->{err_event}) { + + $logger->error(Dumper($data->{err_event})); + $failed++; + + } else { + push(@cleanup_recs, $data->{imported}) if $data->{imported}; + } + } + } + + # clean up the successfully imported vandelay records to prevent queue bloat + my $pcrud = OpenSRF::AppSession->create('open-ils.pcrud'); + $pcrud->connect; + $pcrud->request('open-ils.pcrud.transaction.begin', $authtoken)->recv; + my $err; + + foreach (@cleanup_recs) { + + try { + + $pcrud->request('open-ils.pcrud.delete.vqbr', $authtoken, $_)->recv; + + } catch Error with { + $err = shift; + $logger->error("Error deleteing queued bib record $_: $err"); + }; + } + + $pcrud->request('open-ils.pcrud.transaction.commit', $authtoken)->recv unless $err; + $pcrud->disconnect; + + $logger->info("imported queued vandelay records: @cleanup_recs"); + return (scalar(@cleanup_recs), $failed); +} + +sub process_batch_data { + my $data = shift or $logger->error("process_batch_data called without any data"); + my $isfile = shift; + $data or return; + + $vl_ses = OpenSRF::AppSession->create('open-ils.vandelay'); + + my ($handle, $tempfile); + if (!$isfile) { + ($handle, $tempfile) = File::Temp->tempfile("$0_XXXX", DIR => $tempdir) or die "Cannot write tempfile in $tempdir"; + print $handle $data; + close $handle; + } else { + $tempfile = $data; + } + + $logger->info("Calling process_spool on tempfile $tempfile (queue: $queue_id; source: $bib_source)"); + my $rec_ids = process_spool($tempfile); + + if (oils_event_equals($rec_ids, 'NO_SESSION')) { # has the session timed out? 
+ new_auth_token(); + $rec_ids = process_spool($tempfile); # try again w/ new token + } + + my ($imported, $failed) = bib_queue_import($rec_ids); + + if (oils_event_equals($imported, 'NO_SESSION')) { # has the session timed out? + new_auth_token(); + ($imported, $failed) = bib_queue_import(); # try again w/ new token + } + + oils_event_die($imported); + + return ($imported, $failed); +} + +sub process_request { # The core Net::Server method + my $self = shift; + my $client = $self->{server}->{client}; + + my $sockname = getpeername($client); + my ($port, $ip_addr) = unpack_sockaddr_in($sockname); + $logger->info("stream parser received contact from ".inet_ntoa($ip_addr)); + + my $ph = OpenSRF::Transport::PeerHandle->retrieve; + if(!$ph->flush_socket()) { + $logger->error("We received a request, bu we are no longer connected to opensrf. ". + "Exiting and dropping request from $client"); + exit; + } + + my $data = ''; + eval { + local $SIG{ALRM} = sub { die "alarm\n" }; + alarm $wait_time; # prevent accidental tie ups of backend processes + local $/ = "\x1D"; # MARC record separator + $data = ; + alarm 0; + }; + + if($@) { + $logger->error("reading from STDIN failed or timed out: $@"); + return; + } + + $logger->info("stream parser read " . length($data) . " bytes"); + + my ($imported, $failed) = (0, 0); + + new_auth_token(); # login + + if ($real_opts->{noqueue}) { + ($imported, $failed) = old_process_batch_data($data); + } else { + ($imported, $failed) = process_batch_data($data); + } + + my $profile = (!$merge_profile) ? '' : + $apputils->simplereq( + 'open-ils.pcrud', + 'open-ils.pcrud.retrieve.vmp', + $authtoken, + $merge_profile)->name; + + my $msg = ''; + $msg .= "Successfully imported $imported records using merge profile '$profile'\n" if $imported; + $msg .= "Failed to import $failed records\n" if $failed; + $msg .= "\x00"; + print $client $msg; + + clear_auth_token(); # logout +} + +sub standalone_process_request { # The command line version + my $file = shift; + + $logger->info("stream parser received file processing request for $file"); + + my $ph = OpenSRF::Transport::PeerHandle->retrieve; + if(!$ph->flush_socket()) { + $logger->error("We received a request, bu we are no longer connected to opensrf. ". + "Exiting and dropping request for $file"); + exit; + } + + my ($imported, $failed) = (0, 0); + + new_auth_token(); # login + + if ($real_opts->{noqueue}) { + ($imported, $failed) = old_process_batch_data($file, 1); + } else { + ($imported, $failed) = process_batch_data($file, 1); + } + + my $profile = (!$merge_profile) ? '' : + $apputils->simplereq( + 'open-ils.pcrud', + 'open-ils.pcrud.retrieve.vmp', + $authtoken, + $merge_profile)->name; + + my $msg = ''; + $msg .= "Successfully imported $imported records using merge profile '$profile'\n" if $imported; + $msg .= "Failed to import $failed records\n" if $failed; + $msg .= "\x00"; + print $msg; + + clear_auth_token(); # logout +} + + +# the authtoken will timeout after the configured inactivity period. +# When that happens, get a new one. 
+sub new_auth_token { + $authtoken = oils_login($oils_username, $oils_password, 'staff') + or die "Unable to login to Evergreen as user $oils_username"; + return $authtoken; +} + +sub clear_auth_token { + $apputils->simplereq( + 'open-ils.auth', + 'open-ils.auth.session.delete', + $authtoken + ); +} + +##### MAIN ###### + +osrf_connect($osrf_config); +if ($real_opts->{nodaemon}) { + if (!$real_opts->{spoolfile}) { + print " --nodaemon mode requested, but no --spoolfile supplied!\n"; + exit; + } + standalone_process_request($real_opts->{spoolfile}); +} else { + print "Calling Net::Server run ", (@ARGV ? "with command-line options: " . join(' ', @ARGV) : ''), "\n"; + __PACKAGE__->run(conf_file => $conf_file); +} + +__END__ + +=head1 NAME + +marc_stream_importer.pl - Import MARC records via bare socket connection. + +=head1 SYNOPSIS + +./marc_stream_importer.pl [common opts ...] [script opts ...] -- [Net::Server opts ...] & + +This script uses the EG common options from B. See --help output for those. + +Run C for full documentation. + +Note the extra C<--> to separate options for the script wrapper from options for the +underlying L options. + +Note: this script has to be run in the same directory as B. + +Typical server-style execution will include a trailing C<&> to run in the background. + +=head1 DESCRIPTION + +This script is a L instance for shoving records into Evergreen from a remote system. + +=head1 OPTIONS + +The only required option is --password + + --password = + --user = default: admin + --source = default: 1 Integer + --merge-profile = default: 0 + --tempdir = default: from L + --source = default: 1 + --import-by-queue = default: 0 + --spoolfile = default: NONE File to import in --nodaemon mode + --nodaemon default: OFF When used with --spoolfile, turns off Net::Server mode and runs this utility in the foreground + + +=head2 Old style: --noqueue and associated options + +To bypass vandelay queue processing and push directly into the database (as the old style) + + --noqueue default: OFF + --buffsize = default: 4096 Buffer size. Only used by --noqueue + --wait = default: 5 Seconds to read socket before processing. Only used by --noqueue + +=head2 Net::Server Options + +By default, the script will use the Net::Server configuration file B. You can +override this by passing a filepath with the --conf_file option. + +Other Net::Server options include: --port= --min_servers= --max_servers= and --log_file=[path/to/file] + +See L for a complete list. + +=head2 Configuration + +=head3 OCLC Connexion + +To use this script with OCLC Connexion, configure the client as follows: + +Under Tools -> Options -> Export (tab) + Create -> Choose Connection -> OK -> Leave translation at "None" + -> Create -> Create -> choose TCP/IP (internet) + -> Enter hostname and Port, leave 'Use Telnet Protocol' checked + -> Create/OK your way out of the dialogs + Record Characteristics (button) -> Choose 'UTF-8 Unicode' for the Character Set + + +OCLC and Connexion are trademark/service marks of OCLC Online Computer Library Center, Inc. + +=head1 CAVEATS + +WARNING: This script provides no inherent security layer. Any client that has +access to the server+port can inject MARC records into the system. +Use the available options (like allow/deny) in the Net::Server config file +or via the command line to restrict access as necessary. 
+ +=head1 EXAMPLES + +./marc_stream_importer.pl \ + admin open-ils connexion --port 5555 --min_servers 2 \ + --max_servers=20 --log_file=/openils/var/log/marc_net_importer.log & + +./marc_stream_importer.pl \ + admin open-ils connexion --port 5555 --min_servers 2 \ + --max_servers=20 --log_file=/openils/var/log/marc_net_importer.log & + +=head1 SEE ALSO + +L, L + +=head1 AUTHORS + + Bill Erickson + Joe Atzberger + Mike Rylander (nodaemon+spoolfile mode) + +=cut diff --git a/KCLS/bs_files/metarecord_remapping/data_update_driver.html b/KCLS/bs_files/metarecord_remapping/data_update_driver.html new file mode 100644 index 0000000000..59171ab0bb --- /dev/null +++ b/KCLS/bs_files/metarecord_remapping/data_update_driver.html @@ -0,0 +1,449 @@ + + + + +data_update_driver.pl + + + + + + + + + + + +

[data_update_driver.html (449 lines): an HTML rendering of the POD embedded in data_update_driver.pl below. It documents the same Name, Description, Modules (DBI, DBD::Pg, Parallel::ForkManager, File::Basename, Getopt::Long, Term::ReadKey, Pod::Usage), Usage, Arguments (dir, finished-dir, database, host, user, password, exit-on-error, max-processes, rows, help), Pre-loop, Main, and Subroutines (get_db_handle, run_sql, run_select_sql, get_error_file, print_time, create_helper_func, able_all_triggers, parse_input_file) sections; the markup is omitted here. See the POD in the Perl source below.]
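A minimal, self-contained sketch (not part of the commit; all values are hypothetical) of the two-level chunking that data_update_driver.pl's documentation describes: the key range is first split into one partition per --max-processes child, and each child then substitutes ~start_id~/~end_id~ windows of --rows ids into the update SQL:

        #!/usr/bin/perl
        use strict;
        use warnings;
        use integer;

        my ($min_id, $max_id) = (1, 40_000);   # pretend results of the "SELECT MAX(id), MIN(id) ..." range query
        my $max_processes = 4;                 # --max-processes
        my $rows          = 500;               # --rows

        my $count     = $max_id - $min_id;
        my $part_size = $count / $max_processes;   # integer division: ids handed to each child

        for (my $part = 0; $part < $count; $part += $part_size) {
            # in the driver, each first-level partition is handled by a forked child process
            for (my $start_id = $part; $start_id < $part + $part_size; $start_id += $rows) {
                my $end_id = $start_id + $rows;        # window of at most --rows ids
                my $sql = 'SELECT schema.wrapper_function(~start_id~, ~end_id~);';
                $sql =~ s/~start_id~/$start_id/;
                $sql =~ s/~end_id~/$end_id/;
                print "$sql\n" if $start_id == $part;  # show only the first window of each partition
            }
        }

The real driver forks each first-level partition via Parallel::ForkManager and executes the substituted SQL through DBI rather than printing it.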
+ + + + diff --git a/KCLS/bs_files/metarecord_remapping/data_update_driver.pl b/KCLS/bs_files/metarecord_remapping/data_update_driver.pl new file mode 100644 index 0000000000..2a0e64b851 --- /dev/null +++ b/KCLS/bs_files/metarecord_remapping/data_update_driver.pl @@ -0,0 +1,996 @@ +#!perl -w + +=pod + +=head1 Name + +data_update_driver.pl + +=head1 Description + +The purpose for this script is to take SQL statements that act on a lot of +data (e.g. long UPDATEs) and break them into reasonable sized chunks. The +first partition is by the number of processes you want to run. The size is +the total number of keys in the table divided by the number of processes. + +The second partition is the number of rows you want to process at a time +within a first partition. For instance, the number of keys and processes +may create a first partition of 10,000. Within that, you might want to +process 500 rows at a time. + +The script will start a new child process for each of the first partitions +and within those, will process I number of rows, I and processes +being configurable. + +=cut + +use strict; +use warnings; +use v5.8; +use integer; + +=head1 Modules + +Modules used and what for. + +Note: to load a new module into your local system, execute + + perl -MCPAN -e shell + cpan> Module::Name + cpan> quit + +=over 4 + +=item DBI + +All interactions with the database and SQL. + +=item DBD::Pg + +Specific protocol for PostGreSQL. + +=item Parallel::ForkManager + +Runs up to I number of processes in parallel. I is set by B<--max-processes>. + +=item File::Basename + +Use basename() to strip off path and extention of a file name. + +=item Getopt::Long + +Retrieve long command line options (e.g. --xxx) and do simple validation. + +=item Term::ReadKey + +Used to hide console echoing while typing in passwords. + +=item Pod::Usage + +Used to display usage messages that are pulled from the program's POD. + +=back + +=cut + +use DBI; +use DBD::Pg; +use Parallel::ForkManager; +use File::Basename; +use Getopt::Long; +use Term::ReadKey; +use Pod::Usage; + +=head1 Usage + + post_update_driver.pl [--dir ] [--finished-dir ] + [--database ] [--host ] [--port ] + [--user ] [--password ] + [--exit-on-error] [--max-processes ] + [--rows ] + +=head1 Arguments + +=over 4 + +=item B + +The directory containing the update scripts. It defaults to the current +directory. + +=item B + +The directory where the scripts are moved to when they finish correctly. It +defaults to a folder called 'finished'. + +=item B + +The database name. + +=item B + +The host name or IP address for the database. + +The port number. + +=item B + +The port number for the database. + +=item B + +The user name for the database. + +=item B + +The password for the database. If none is provided, it will ask for one. + +=item B + +Flag: should the script stop when it encounters an SQL error? If not, it will +continue processing. Regardless, an error file is created with the extension +'.err' that holds the error message. + +=item B + +Number: what is the maximum number of processes to run in parallel? The +default is four. + +=item B + +The number of rows the SQL script should process at a time. The default +is 500. + +=item B + +Display usage and exit. 
+ +=back + +=cut + +$| = 1; #auto flush + +my $dir = '.'; +my $finished_dir = 'finished'; +my $database = ''; +my $host = ''; +my $port = 5432; # PostGres's default port +my $user = ''; +my $password = ''; +my $max_processes = 6; +my $rows = 500; +my $help; +my $exit_on_error; + +# Use Getopt::Long to get the command line options. Use the POD section +# "Usage" if an option is entered incorrectly +GetOptions( + 'help!' => \$help, # default is false... + 'exit-on-error!' => \$exit_on_error, + 'dir=s' => \$dir, # strings... + 'finished-dir=s' => \$finished_dir, + 'database=s' => \$database, + 'host=s' => \$host, + 'port=s' => \$port, + 'user=s' => \$user, + 'password=s' => \$password, + 'max-processes=i' => \$max_processes, # numeric + 'rows=i' => \$rows +) or pod2usage( -verbose => 99, -sections => [ 'Usage' ], -exitval => 2 ); + +# Print the POD Usage and Arguments sections if the help flag is up +if ($help) { + pod2usage( + -verbose => 99, + -sections => [ 'Usage', 'Arguments' ], + -exitval => 1 ); +} + +=head1 Pre-loop + +Get command line options. Get password if none is supplied. Exit if B +does not exist. If B does not exist, create it. Setup database +parameters. Test database parameters to see if they connect correctly. If +B is set, make a callback for the parent process so it will die +if a child processes returns an error code. Remove all error files. These +would have been created by a previous run. + +=cut + +# Get password if not supplied +unless ($password) { + print "Type your password: "; + ReadMode('noecho'); # don't display characters while typing + chomp($password = ); + ReadMode(0); # back to normal + print "\n"; +} + +# Check the directories +$dir =~ s|\\|/|g; # backslashes to slashes +$dir =~ s|/$||; # remove trailing slash +$finished_dir =~ s|\\|/|; +$finished_dir =~ s|/$||; + +unless ( -d $dir ) { + die "$dir does not exist\n"; +} + +unless ( -d $finished_dir ) { + mkdir $finished_dir or die "Could not create $finished_dir\n$!\n"; +} + +# Database connect info +my $db_params = { + platform => 'Pg', # Always PostGreSQL + database => $database, + host => $host, + port => $port, + user => $user, + pw => $password +}; + +# Check that database info is correct +my $test_dbh = get_db_handle( $db_params ); +$test_dbh->disconnect; + +my $pm = Parallel::ForkManager->new($max_processes); + +# Callback that checks the exit status of the children. +# If we should exit on error, tell the parent to die. +if ($exit_on_error) { + $pm->run_on_finish( + sub { + my ($pid, $exit_code, $ident) = @_; + + if ($exit_code == 1) { + die "Child process encountered an error in the SQL\n"; + } elsif ($exit_code == 2) { + die "Child process encountered an error during rename\n"; + } + } + ); +} + +print "Removing error files...\n"; +unlink glob "$dir/*.err"; + +=head1 Main + +Roll through each data file in B that ends with _data.sql. Read the +file line by line. The first comment (--) is the script's description. +The first line that begins "SELECT MAX" is considered the id min and max +SQL, all in one line. The id ranges are determined by +executing this SQL. The partition size is determined by diving the total +number of keys by B. + +This script is responsible for creating its own wrapper function to be called +with a start id and end id, if needed. When the script encounters a data line +that begins +"CREATE [OR REPLACE] FUNCTION", it will start collecting lines. It will end +when it finds a line that starts with "$$ LANGUAGE". 
All of the lines between +these two, inclusive, are the create wrapper function script. The wrapper +function should call the update function passing starting and ending ID. You +may not need a wrapper function if you are calling an UPDATE direectly. + +The data line that starts with "DROP FUNCTION" is considered SQL to drop the +wrapper function, if needed. It should be one line only. You do not need this +SQL if you are not using the CREATE FUNCTION SQL. + +If a data line starts with "ALTER TABLE", it is considered the enable/disable +triggers SQL statement. All triggers are disable before running the updates +and enabled afterward. + +After all of the above lines +are removed from consideration, what remains is the actual SQL update. It is +often just a SELECT statement that calls the wrapper function with the place +holders "~start_id~" and "~end_id~". This script will replace them with the +values it calculates. + +Comments and blank lines are ignored. + +Then, start a loop for each partition size and +start a child process for each one. Within each partision, execute the SQL +on only B number of rows. This is determined by setting the starting +and ending ID. Since IDs aren't always sequential, there may be less that +B number of rows updated. + +After all partitions execute the file script is moved +to the B folder. File scripts that encountered errors stay in +B with their error files. Triggers are enabled. + +A sample data file might look like this: + + SELECT MAX(id), MIN(id) from schema.some_table; + + ALTER TABLE schema.some_table DISABLE TRIGGER ALL; + + UPDATE schema.some_table SET col_name = something + WHERE id >= ~start_id~ AND id < ~end_id~; + +A sample file that updates using a standard function in Evergreen might look +like this: + + CREATE OR REPLACE FUNCTION schema.wrapper_function(start_id BIGINT, end_id BIGINT) + RETURNS void AS $$ + DECLARE + rec RECORD; + BEGIN + FOR rec IN SELECT id, some_col FROM schema.table_to_update WHERE id >= start_id AND id < end_id + LOOP + PERFORM schema.update_function( rec.id, rec.some_col ); + END LOOP; + END; + $$ LANGUAGE plpgsql; + + DROP FUNCTION IF EXISTS schema.wrapper_function(BIGINT, BIGINT); + + SELECT MAX(id), MIN(id) from schema.table_to_update; + + ALTER TABLE schema.some_table DISABLE TRIGGER ALL; + + SELECT schema.wrapper_function(~start_id~, ~end_id~); + +=cut + +print "Begin creating helper functions...\n"; +my $time = time(); +my @drop_func = create_helper_func( $dir, $db_params ); + +print "Begin executing post update scripts...\n"; +my $error; + +# All of these processes will run in parallel, up to $max_processes +foreach my $file ( glob "$dir/*_data.sql" ) { + my $input_fh; + + # Open file + unless ( open ($input_fh, '<', $file) ) { + + # Log error on failure + my $system_error = $!; + my ( $fail_fh, $error_file ) = get_error_file( $file ); + print $fail_fh "Unable to open $file for reading\n"; + print $fail_fh "$system_error\n"; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "Unable to open $file for reading\n"; + + next; + } + + # Parse input for different SQL statements + my ($sql, $desc, $range_sql, $create_func_sql, $drop_func_sql, $able_trigger, $truncate_sql) = + parse_input_file( $input_fh, $file ); + + #Truncate Table statement + if ($truncate_sql) { + next unless run_sql( $db_params, $truncate_sql, $file, $desc ); + } + + # Disable all triggers for this table + print "Disabling triggers...\n"; + able_all_triggers( 'DISABLE', $able_trigger, $file, $db_params ); + + # Create the 
function that will get called with an id range + if ($create_func_sql) { + next unless run_sql( $db_params, $create_func_sql, $file, $desc ); + } + + # Get the id ranges for this table + print "Getting id ranges...\n"; + unless ($range_sql) { die "*** Bad input script $file, no id ranges\n" } + my ($max_id, $min_id) = run_select_sql( $db_params, $range_sql, $file, $desc ); + + unless ( defined $max_id and defined $min_id ) { + my ( $fail_fh ) = get_error_file( $file ); + print $fail_fh "Could not determine the id ranges\n"; + next; + } + + # Break table into partitions based on id ranges and processes + my $count = $max_id - $min_id; + my $part_size = $count / $max_processes; # int div because of use integer + my $print_file = basename $file; + + for ( my $part = 0; $part < $count; $part += $part_size ) { + my $pid = $pm->start and next; + print "\t$file, part $part ($pid)\n"; + my $print_file = basename $file; + + # Execute SQL in ranges of ids based on min/max ids + for ( my $start_id = $part; $start_id < $part + $part_size; $start_id += $rows ) { + + # Set the start id in a copy of the SQL string + (my $exec_sql = $sql) =~ s/~start_id~/$start_id/i; + + # The last limit will probably not be the exact rows amount + my $left = $count - $start_id + 1; + my $this_rows = $rows <= $left ? $rows : $left; + $exec_sql =~ s/~end_id~/$start_id + $this_rows/ie; + + # Execute the SQL + if ( run_sql($db_params, $exec_sql, $file, $desc, $start_id ) ) { + print "\tstart id: $start_id, " . ($part + $part_size - $start_id) . + " left ($print_file)\n"; + } else { + $error = 1; + last; + } + } + + # Inform the parent process of the error + if ($error) { + $pm->finish(1); + last; + } else { + $pm->finish; + } + } + + $pm->wait_all_children; + + # Succesful finish, move script + unless ($error) { + my $base = basename $file; + my $finish_name = "$finished_dir/$base"; + + rename $file, $finish_name + or warn "*** Could not rename $file to $finish_name\n$!\n"; + } + + # Drop wrapper function + if ($drop_func_sql) { + unless ( run_sql( $db_params, $drop_func_sql, $file, $desc ) ) { + warn "*** Could not drop wrapper function\n"; + } + } + + # Enable all triggers for this table + print "Enabling triggers...\n"; + able_all_triggers( 'ENABLE', $able_trigger, $file, $db_params ); + + last if $error && $exit_on_error; + +} # foreach file + +# Drop any temporary functions used above +foreach my $drop (@drop_func) { + run_sql($db_params, $drop, 'No file', 'Drop function' ); +} + +print 'Finished' . ($error ? ' with error' : '') . "\n"; + +# Do this when the program ends, no matter what. +# A side effect of this is that time will print when each child process ends. +END { + print_time( $time ); +} + +=head1 Subroutines + +=head2 get_db_handle + +Get a database handle + +=over 4 + +=item Parameters + +B<$db_params> - reference to several DB parameters + +=item Returns + +B<$dbh> - database handle or zero + +=back + +=cut + +sub get_db_handle { + my $db_params = shift || return 0; + + my $platform = $db_params->{platform}; + my $database = $db_params->{database}; + my $host = $db_params->{host}; + my $port = $db_params->{port}; + my $user = $db_params->{user}; + my $pw = $db_params->{pw}; + + my $dsn = "dbi:$platform:dbname = $database; host = $host; port = $port"; + + my $dbh = DBI->connect( $dsn, $user, $pw, { + 'PrintError' => 1, + 'RaiseError' => 1, + 'PrintWarn' => 1, + 'AutoCommit' => 0 # Auto commit off so we can commit/rollback + }) or die "Unable to connect: " . $DBI::errstr . 
"\n"; + + return $dbh; +} + +=head2 run_sql + +Execute a non-SELECT SQL statement and capture any error output + +=over 4 + +=item Parameters + +B<$db_params> - the DB parameters (ref to hash) + +B<$sql> - the SQL statement + +B<$file> - the file name + +B<$desc> - a description of the task (first comment) + +B<$start_d> - the starting ID when the error occurred or zero + +=item Returns + +1 = success, 0 = failure + +=item Side Effects + +Creates a file with the extension .err if there is an error executing the SQL + +=back + +=cut + +sub run_sql { + my $db_params = shift; + my $sql = shift; + my $file = shift || 'no_file'; + my $desc = shift || 'SQL Script'; + my $start_id = shift || '0'; + + # Sanity check + unless ( $db_params and ref $db_params eq 'HASH' and $sql ) { + return 0; + } + + my $dbh = get_db_handle($db_params); + + $dbh->do('SET statement_timeout = 0;'); + $dbh->commit; + + # Catch any errors + eval { $dbh->do($sql) }; + + # If there were errors... + if ($@) { + warn "$@\n"; + + # Log SQL error + my $err = $dbh->errstr; + $dbh->rollback; + my $rollback_err = $dbh->errstr; + warn "*** $file rolled back\n" unless $rollback_err; + $dbh->disconnect; + my $disconnect_err = $dbh->errstr; + my ( $fail_fh, $error_file ) = get_error_file( $file, $start_id ); + + print $fail_fh "Can't execute SQL statement!\n"; + print $fail_fh "$file: $desc\n"; + print $fail_fh "$err\n"; + print $fail_fh "Rollback error: $rollback_err\n" if $rollback_err; + print $fail_fh "Disconnect error: $disconnect_err\n" if $disconnect_err; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "*** Can't execute SQL statement! $file: $desc\n"; + + return 0; + } + + $dbh->commit; + $dbh->disconnect; + + return 1; +} + +=head2 run_select_sql + +Execute a SELECT SQL statement and fetch one column + +=over 4 + +=item Parameters + +B<$db_params> - the DB parameters (ref to hash) + +B<$sql> - the SQL statement + +B<$file> - the data input file name + +B<$desc> - a description of the task (first comment) + +B<$start_id> - the starting ID when the error occurred or zero + +=item Returns + +An array of column values in list context, or a reference to the array in +scalar context + +=item Side Effects + +Creates a file with the extension .err if there is an error executing the SQL + +=back + +=cut + +sub run_select_sql { + my $db_params = shift; + my $sql = shift; + my $file = shift || 'no_file'; + my $desc = shift || 'SQL Script'; + my $start_id = shift || '0'; + + # Sanity check + unless ( $db_params and ref $db_params eq 'HASH' and $sql ) { + return 0; + } + + my $dbh = get_db_handle($db_params); + my @row; + my $sth; + + eval { + $sth = $dbh->prepare( $sql ); + $sth->execute(); + @row = $sth->fetchrow_array(); + }; + + if ($@) { + warn "$@\n"; + + # Log SQL error + my $err = $dbh->errstr; + $dbh->disconnect; + my $disconnect_err = $dbh->errstr; + my ( $fail_fh, $error_file ) = get_error_file( $file, $start_id ); + + print $fail_fh "Can't execute SQL statement!\n"; + print $fail_fh "$file: $desc\n"; + print $fail_fh "$sql\n"; + print $fail_fh "$err\n"; + print $fail_fh "Disconnect error: $disconnect_err\n" if $disconnect_err; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "*** Can't execute SQL statement! $file: $desc\n"; + + return undef; + } + + $sth->finish; + $dbh->disconnect; + return wantarray ? @row : \@row; +} + +=head2 get_error_file + +Create and open an error file. Put in a timestamp. The error file name is +the file name with the extention of .err. 
+ +=over 4 + +=item Parameters + +B<$file> - the file name to create the error file for + +B<$start_id> - the starting ID when the error occurred or zero + +=item Returns + +An array in list context; a reference to an array in scalar context + +[0] B<$fail_fh> - the file handle of the openned error file + +[1] B<$error_file> - the error file name + +=back + +=cut + +sub get_error_file { + my $file = shift || 'unknown'; + my $start_id = shift || '0'; + + my ($sec, $min, $hour, $mday, $mon, $year) = (localtime(time))[0..5]; + my $timestamp = "$hour:$min:$sec " . ($mon + 1) . "-$mday-" . ($year + 1900); + my ( $basename, $dir ) = fileparse( $file, '.sql' ); + my $error_file = "$dir$basename-$start_id.err"; + my $fail_fh; + + open ($fail_fh, '>>', $error_file) + or die "Could not open $error_file for appending\n"; + print $fail_fh "$timestamp\n"; + my @return_data = ( $fail_fh, $error_file ); + + return wantarray ? @return_data : \@return_data; +} + +=head2 print_time + +Print time elapsed in hours, minutes, and seconds + +=over 4 + +=item Parameters + +B<$start> - the start time, taken from the I function + +=item Side Effects + +Prints elasped time to the standand out + +=back + +=cut + +sub print_time { + my $start = shift || 0; + my $current = time(); + my $elapsed = $current - $start; + my $hours = $elapsed / (60 * 60); + my $seconds = $elapsed % 60; + my $minutes = ($elapsed - $hours * 60 * 60) / 60; + + print "$current - "; + print "Time elapsed: "; + print "$hours hours, " if $hours; + print "$minutes minutes, " if $minutes; + print "$seconds seconds\n"; +} + +=head2 create_helper_func + +Create any helper functions needed by the update. The input data file should +contain a commented DROP statement what will drop the function when it's not +needed. For example: + + -- DROP FUNCTION IF EXISTS schema.some_function(BIGINT, TEXT) + + CREATE OR REPLACE FUNCTION schema.some_function(id BIGINT, marc TEXT) + ... + +=over 4 + +=item Parameters + +B<$dir> - the source directory for the input data files + +B<$db_params> - a hash reference to the DB parameters + +=item Returns + +In list context, an array of SQL DROP statements that will remove the helper +functions at the end of the update. In scalar context, a reference to that array. + +=item Side Effects + +An error file is created if an error in encountered. + +=back + +=cut + +sub create_helper_func { + my $dir = shift; + my $db_params = shift; + my @drop_func = (); + + foreach my $file ( glob "$dir/*_create.sql" ) { + + # Open file and get SQL statement + unless ( open (FH, '<', $file) ) { + + # Log error on failure + my $system_error = $!; + my ( $fail_fh, $error_file ) = get_error_file( $file ); + print $fail_fh "Unable to open $file for reading\n"; + print $fail_fh "$system_error\n"; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "*** Unable to open $file for reading\n"; + + next; + } + + my $sql = ''; + + # Loop thru create file + while () { + + # Collect DROP FUNCs in array + if ( /^\s*--\s*DROP\s+FUNCTION/ ) { + s/^\s*--\s*//; + push @drop_func, $_; + next; + } + + next if /^\s*--/; + next if /^\s*$/; #* this comment helps syntax highlighting + + $sql .= $_; + } + + close FH or warn "*** Could not close $file\n$!\n"; + run_sql( $db_params, $sql, $file, 'Create function' ) + or die "*** Could not create helper file\n"; + + } # end foreach $file + + return wantarray ? @drop_func : \@drop_func; +} + +=head2 able_all_triggers + +Enable/Disable triggers on a table. 
The SQL is pulled from the input data +file line that begins "ALTER TABLE". + +=over 4 + +=item Paramters + +B<$able> - The word ENABLE or DISABLE, depending on what you want to do to +the triggers. Defaults to DISABLE. + +B<$range_sql> - The SQL statement that gets the ID ranges, previously +extracted from the input data file. + +B<$file> - The name of the input data file. + +B<$db_params> - The DB parameters (ref to hash) + +=item Side Effects + +Enables or disables triggers for a table. + +=back + +=cut + +sub able_all_triggers { + my $able = shift || 'DISABLE'; + my $able_trigger = shift; + my $file = shift; + my $db_params = shift; + + unless ( $able =~ /ENABLE|DISABLE/i ) { + warn "*** Bad first param in able_all_triggers()\n"; + } + + # Change the SQL statement to reflect enabling or disabling + (my $sql = $able_trigger) =~ s{\b(?:ENABLE|DISABLE)\b}{\U$able\E}i; + + unless ( run_sql( $db_params, $sql, $file, "\L$able\E triggers" ) ) { + warn "*** Cannot \L$able\E triggers\n"; + } +} + +=head2 parse_input_file + +Parse the input data file for different SQL statements and return each +statement. + +=over 4 + +=item Parameters + +B<$input_fh> - a file handle opened to the input file + +B<$file> - the input file name + +=item Returns + +In array context, an array of all the different SQL statements parsed. In +scalar context, a reference to that array. + +[0] B<$sql> - the main updating SQL statement(s) + +[1] B<$desc> - the description of this task + +[2] B<$range_sql> - the SQL statement that gets the ID ranges + +[3] B<$create_func_sql> - the SQL to create a wrapper function, if any + +[4] B<$drop_func_sql> - the SQL statement that drops the wrapper function, if any + +[5] B<$able_trigger> - the SQL to enable/disable all triggers on the update table + +=back + +=cut + +sub parse_input_file { + my $input_fh = shift || return undef; + my $file = shift; + + my $sql = ''; + my $desc = ''; + my $range_sql = ''; + my $create_func_sql = ''; + my $drop_func_sql = ''; + my $able_trigger = ''; + my $truncate_sql = ''; + + # String SQL statement together + while (<$input_fh>) { + + # Kludge: remove anything that isn't ASCII 20-127 or whitespace + # (why are we getting weird characters in front of the first line?) + s/[^\x{21}-\x{7E}\s]//g; + + # First comment is the description + if ( $desc eq '' && /^\s*--\s*/ ) { + chomp; + s/^\s*--\s*//; # strip off dashes and leading whitespace + $desc = $_; + next; + } + + # Ignore comments and blank lines + next if /^\s*--/; + next if /^\s*$/; #* this comment helps syntax highlighting + + # Find the min and max ids select statement + if ( $range_sql eq '' && /^\s*SELECT\s+MAX/i ) { + chomp; + $range_sql = $_; + next; + } + + # Find drop function SQL + if ( $drop_func_sql eq '' && /^\s*DROP\s+FUNCTION\s+/i ) { + chomp; + $drop_func_sql = $_; + next; + } + + # Find truncate SQL + if ( $truncate_sql eq '' && /^\s*TRUNCATE\s+TABLE\s+/i ) { + chomp; + $truncate_sql = $_; + next; + } + + # Find enable/disable trigger statement + if ( $able_trigger eq '' && /^\s*ALTER\s+TABLE\s+/i ) { + chomp; + $able_trigger = $_; + next; + } + + # Get create function SQL + # Starts with "CREATE [OR REPLACE] FUNCTION..." + # Ends with "$$ LANGUAGE..." + if ( $create_func_sql eq '' && + /^\s*CREATE\s+(OR\s+REPLACE\s+)?FUNCTION\s+/i ) + { + while (1) { + $create_func_sql .= $_; + defined( $_ = <$input_fh> ) + or die "*** Readline failed: $!\nBad input script? 
$file\n"; + + if ( /^\s*\$\$\s+LANGUAGE\s+/i ) { + $create_func_sql .= $_; + last; + } + } + + next; + } + + # Add to execute SQL + $sql .= $_; + + } # end while readline SQL file + + close $input_fh or warn "*** Could not close $file\n$!\n"; + + my @return_data = ($sql, $desc, $range_sql, $create_func_sql, $drop_func_sql, $able_trigger, $truncate_sql); + + return wantarray ? @return_data : \@return_data; +} + +__END__ diff --git a/KCLS/bs_files/metarecord_remapping/metarecord_remapping_data.sql b/KCLS/bs_files/metarecord_remapping/metarecord_remapping_data.sql new file mode 100644 index 0000000000..a779c332d6 --- /dev/null +++ b/KCLS/bs_files/metarecord_remapping/metarecord_remapping_data.sql @@ -0,0 +1,17 @@ + +-- Must start with CREATE [OR REPLACE] FUNCTION and end with $$ LANGUAGE. +CREATE OR REPLACE FUNCTION metabib.wrap_triggered_reingest_for_bib_set(start_id BIGINT, end_id BIGINT) +RETURNS void AS $$ +DECLARE + rec RECORD; +BEGIN + PERFORM metabib.triggered_reingest_for_bib_set( start_id, end_id ); +END; +$$ LANGUAGE plpgsql; + +-- Get min and max id from update file +SELECT MAX(id), MIN(id) FROM biblio.record_entry; + +-- Execute part of the trigger with start and end id. +-- These are replaced by the post_update_driver script. +SELECT metabib.wrap_triggered_reingest_for_bib_set(~start_id~, ~end_id~); \ No newline at end of file diff --git a/KCLS/bs_files/metarecord_remapping_batcher_from_file.sh b/KCLS/bs_files/metarecord_remapping_batcher_from_file.sh new file mode 100755 index 0000000000..07fec6c8a8 --- /dev/null +++ b/KCLS/bs_files/metarecord_remapping_batcher_from_file.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# This script is to create batches of bib records for metarecord remapping. + +# To run example +# ./metarecord_remapping_batcher_from_file.sh testsample + +# Get the repo root directory +REPO_ROOT=$(git rev-parse --show-toplevel) + +# Parameters +if [ "$1" = "-h" ]; then + HNAME=$2 +else + echo Please enter hostname with -h "hostname" +fi + +if [ "$3" = "-p" ]; then + PORT=$4 +else + echo Please enter port with -p "port" +fi + +if [ "$5" = "-f" ]; then + FILE=$6 +else + echo Please enter export file with -f "export file" +fi + +echo hostname is $HNAME and port is $PORT +echo + +FILEOUT="$FILE.metarecord_reingest.batches" +date +echo "creating batches..." +while read line; do + echo "psql -d evergreen -h $HNAME -p $PORT -U evergreen -c 'SELECT metabib.triggered_reingest_for_bib_id($line)'" >> $FILEOUT +done < $FILE +date +echo "metarecord reingest batches created in $FILEOUT." \ No newline at end of file diff --git a/KCLS/bs_files/modified_since_export_record_importer.pl b/KCLS/bs_files/modified_since_export_record_importer.pl new file mode 100755 index 0000000000..56e4c8f316 --- /dev/null +++ b/KCLS/bs_files/modified_since_export_record_importer.pl @@ -0,0 +1,46 @@ +# -------------------------------------------------------------------------------------- +# FileName: modified_since_export_record_importer.pl +# +# Description: The purpose of this file is to import the updated bib records from BackStage +# that have been modified since being exported. This uses marc_stream_importer.pl to +# preserve fields on import. All marc records in the folder /unupdated/ are processed. 
+# +# To Run: perl modified_since_export_record_importer.pl --user username --password password --queue 4 --merge-profile 1086 +# +# Created Date: May 1, 2014 +# Created By: Kyle Tomita +# -------------------------------------------------------------------------------------- + +use strict; +use warnings; +use Getopt::Long; # for the options/parameters + +my $script = "./marc_stream_importer.pl"; + +# command line required options +my $usr = ""; +my $password = ""; +my $queue = ""; #1086 +my $merge_profile = ""; #4 +my $result = GetOptions("user=s" => \$usr, + "password=s" => \$password, + "queue=s" => \$queue, + "merge-profile=s" => \$merge_profile); + +my $command = ""; + +print ("}=}}}*> " . localtime . " Started importing updated bib records that have been modified since the export!\n"); + +system("sudo mkdir -p /opt/kcls/openils/var/data/offline/vandelay/kcls"); +system("sudo rm /opt/kcls/openils/var/data/offline/vandelay/kcls/*"); +system("sudo mv /var/KCLS_AUTH/unupdated/* /opt/kcls/openils/var/data/offline/vandelay/kcls"); +my $dir = '/opt/kcls/openils/var/data/offline/vandelay/kcls'; # directory that has the marc record files that were modified since the export +foreach my $spoolfile (glob("$dir/*.mrc")) { + print("<*{{{={ " . localtime . ": " . $spoolfile . "\n"); + $command = $script . " --spoolfile " . $spoolfile . " --user " . $usr . " --password " . $password . " --queue " . $queue . " --merge-profile " . $merge_profile . " --nodaemon"; + + system("/usr/bin/perl " . $command); # runs the command +} + +system("sudo mv /opt/kcls/openils/var/data/offline/vandelay/kcls/* /var/KCLS_AUTH/unupdated_done"); +print ("}=}}}*> " . localtime . " DONE!\n"); diff --git a/KCLS/bs_files/oils_header.pl b/KCLS/bs_files/oils_header.pl new file mode 100644 index 0000000000..6dc6be6f65 --- /dev/null +++ b/KCLS/bs_files/oils_header.pl @@ -0,0 +1,204 @@ +#!/usr/bin/perl + +#---------------------------------------------------------------- +# Generic header for tesing OpenSRF methods +#---------------------------------------------------------------- + +use strict; +use warnings; +use OpenSRF::Utils::JSON; +use Data::Dumper; +use OpenSRF::System; +use OpenSRF::AppSession; +use OpenSRF::EX qw(:try); +use Time::HiRes qw/time/; +use Digest::MD5 qw(md5_hex); +use OpenILS::Utils::Fieldmapper; +use OpenILS::Application::AppUtils; +use OpenSRF::Utils::SettingsClient; +use OpenSRF::Utils::Logger qw/:logger/; +use UNIVERSAL::require; + + +# Some useful objects +our $cache = "OpenSRF::Utils::Cache"; +our $apputils = "OpenILS::Application::AppUtils"; +our $memcache; +our $user; +our $authtoken; +our $authtime; + +# Some constants for our services +our $AUTH = 'open-ils.auth'; +our $STORAGE = 'open-ils.storage'; +our $SEARCH = 'open-ils.search'; +our $CIRC = 'open-ils.circ'; +our $CAT = 'open-ils.cat'; +our $MATH = 'opensrf.math'; +our $SETTINGS = 'opensrf.settings'; +our $ACTOR = 'open-ils.actor'; + +sub AUTH { return $AUTH; } +sub STORAGE { return $STORAGE; } +sub SEARCH { return $SEARCH; } +sub CIRC { return $CIRC; } +sub CAT { return $CAT; } +sub MATH { return $MATH; } +sub SETTINGS { return $SETTINGS; } +sub ACTOR { return $ACTOR; } + + +#---------------------------------------------------------------- +# Exit a script +#---------------------------------------------------------------- +sub err { + my ($pkg, $file, $line, $sub) = _caller(); + no warnings; + die "Script halted with error ". + "($pkg : $file : $line : $sub):\n" . shift() . 
"\n"; +} + +#---------------------------------------------------------------- +# Print with newline +#---------------------------------------------------------------- +sub printl { print "@_\n"; } + +#---------------------------------------------------------------- +# Print with Data::Dumper +#---------------------------------------------------------------- +sub debug { printl(Dumper(@_)); } + + +#---------------------------------------------------------------- +# This is not the function you're looking for +#---------------------------------------------------------------- +sub _caller { + my ($pkg, $file, $line, $sub) = caller(2); + if(!$line) { + ($pkg, $file, $line) = caller(1); + $sub = ""; + } + return ($pkg, $file, $line, $sub); +} + + +#---------------------------------------------------------------- +# Connect to the servers +#---------------------------------------------------------------- +sub osrf_connect { + my $config = shift; + err("Bootstrap config required") unless $config; + OpenSRF::System->bootstrap_client( config_file => $config ); + Fieldmapper->import(IDL => + OpenSRF::Utils::SettingsClient->new->config_value("IDL")); + reset_cstore(); +} + +sub reset_cstore { + my ($key) = grep { $_ =~ /OpenILS.*CStoreEditor/o } keys %INC; + return unless $key; + delete $INC{$key}; + my $h = $SIG{__WARN__}; + $SIG{__WARN__} = sub {}; + require OpenILS::Utils::CStoreEditor; + $SIG{__WARN__} = $h; +} + + +#---------------------------------------------------------------- +# Get a handle for the memcache object +#---------------------------------------------------------------- +sub osrf_cache { + $cache->use; + $memcache = $cache->new('global') unless $memcache; + return $memcache; +} + +#---------------------------------------------------------------- +# Is the given object an OILS event? +#---------------------------------------------------------------- +sub oils_is_event { + my $e = shift; + if( $e and ref($e) eq 'HASH' ) { + return 1 if defined($e->{ilsevent}); + } + return 0; +} + +sub oils_event_equals { + my( $e, $name ) = @_; + return 1 if (oils_is_event($e) and ($e->{textcode} eq $name)); + return 0; +} + +#---------------------------------------------------------------- +# If the given object is an event, this prints the event info +# and exits the script +#---------------------------------------------------------------- +sub oils_event_die { + my $evt = shift; + my ($pkg, $file, $line, $sub) = _caller(); + if(oils_is_event($evt)) { + if($evt->{ilsevent}) { + printl("\nReceived Event($pkg : $file : $line : $sub): \n" . Dumper($evt)); + exit 1; + } + } +} + + +#---------------------------------------------------------------- +# Login to the auth server and set the global $authtoken var +#---------------------------------------------------------------- +sub oils_login { + my( $username, $password, $type ) = @_; + + $type |= "staff"; + + my $seed = $apputils->simplereq( $AUTH, + 'open-ils.auth.authenticate.init', $username ); + err("No auth seed") unless $seed; + + my $response = $apputils->simplereq( $AUTH, + 'open-ils.auth.authenticate.complete', + { username => $username, + password => md5_hex($seed . 
md5_hex($password)), + type => $type }); + + err("No auth response returned on login") unless $response; + + oils_event_die($response); + + $authtime = $response->{payload}->{authtime}; + $authtoken = $response->{payload}->{authtoken}; + return $authtoken; +} + + +#---------------------------------------------------------------- +# Destroys the login session on the server +#---------------------------------------------------------------- +sub oils_logout { + $apputils->simplereq( + 'open-ils.auth', + 'open-ils.auth.session.delete', (@_ ? shift : $authtoken) ); +} + +#---------------------------------------------------------------- +# Fetches the user object and sets the global $user var +#---------------------------------------------------------------- +sub oils_fetch_session { + my $ses = shift; + my $resp = $apputils->simplereq( $AUTH, + 'open-ils.auth.session.retrieve', $ses, 'staff' ); + oils_event_die($resp); + return $user = $resp; +} + +#---------------------------------------------------------------- +# var $response = simplereq( $service, $method, @params ); +#---------------------------------------------------------------- +sub simplereq { return $apputils->simplereq(@_); } +sub osrf_request { return $apputils->simplereq(@_); } + +1; diff --git a/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.meta b/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.meta new file mode 100644 index 0000000000..ed493bac39 --- /dev/null +++ b/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.meta @@ -0,0 +1,25 @@ +--- #YAML:1.0 +name: MARC-Record +version: 2.0.3 +abstract: Perl extension for handling MARC records +author: + - Galen Charlton +license: perl +distribution_type: module +configure_requires: + ExtUtils::MakeMaker: 0 +build_requires: + ExtUtils::MakeMaker: 0 +requires: + Carp: 0 + File::Find: 0 + File::Spec: 0 + Test::More: 0 +no_index: + directory: + - t + - inc +generated_by: ExtUtils::MakeMaker version 6.55_02 +meta-spec: + url: http://module-build.sourceforge.net/META-spec-v1.4.html + version: 1.4 diff --git a/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.readme b/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.readme new file mode 100644 index 0000000000..dd4f0a8a27 --- /dev/null +++ b/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.readme @@ -0,0 +1,65 @@ +MARC::Record and its family +=========================== + +SYNOPSIS + +The MARC::* series of modules create a simple object-oriented +abstraction of MARC record handling. The files are: + +MARC::Doc::Tutorial + A tutorial explaining how to use MARC::Record. + +MARC::Record + The core class for representing a single MARC record. + +MARC::Field + Another core class for representing a single field in a record. + +MARC::Batch + The basic object for access to a batch of one or more MARC records. + +MARC::File + Base class for the MARC file. + +MARC::File::USMARC +MARC::File::MicroLIF + Subclasses of MARC::File specific to the USMARC and MicroLIF formats + +MARC::Lint + Extension to check MARC records for validity. + + +INSTALLATION + +To install this module type the following: + + perl Makefile.PL + make + make test + make install + + +DEPENDENCIES + +MARC::Record now requires a perl version >= 5.8.2 for processing unicode +correctly. 
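EXAMPLE

A minimal, illustrative use of MARC::Batch and MARC::Record (the input file
name here is an assumption, not something shipped with this distribution):

    use MARC::Batch;

    # Open a batch of binary USMARC records.
    my $batch = MARC::Batch->new('USMARC', 'records.mrc');

    while (my $record = $batch->next()) {
        # 245 is the title statement field; subfield a is the title proper.
        my $field = $record->field('245');
        print $field->subfield('a'), "\n" if $field && $field->subfield('a');
    }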
+ +COPYRIGHT AND LICENCE + +Copyright (C) 2001-2010 by contributors: + +2001-2007 Andy Lester +2002-2007 Ed Summers +2003-2005 Eric Lease Morgan +2003 Morbus Iff +2004-2005 Bryan Baldus +2004 Mark Jordan +2007 Mike Rylander +2007 Dan Scott +2009 Bill Dueber +2010 Galen Charlton +2010 Frédéric Demians +2010 Dan Wells + +This software is free software and may be distributed under the same +terms as Perl itself . diff --git a/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.tar.gz b/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.tar.gz new file mode 100644 index 0000000000..4cdb39345e Binary files /dev/null and b/KCLS/bs_files/perl_packages/MARC-Record-2.0.3.tar.gz differ diff --git a/KCLS/bs_files/perl_packages/MARC-XML-0.93.meta b/KCLS/bs_files/perl_packages/MARC-XML-0.93.meta new file mode 100644 index 0000000000..5fb562db5a --- /dev/null +++ b/KCLS/bs_files/perl_packages/MARC-XML-0.93.meta @@ -0,0 +1,24 @@ +--- #YAML:1.0 +name: MARC-XML +version: 0.93 +abstract: ~ +author: + - Ed Summers +license: perl +distribution_type: module +configure_requires: + ExtUtils::MakeMaker: 0 +build_requires: + ExtUtils::MakeMaker: 0 +requires: + MARC::Charset: 0.98 + MARC::Record: 2 + XML::SAX: 0.12 +no_index: + directory: + - t + - inc +generated_by: ExtUtils::MakeMaker version 6.55_02 +meta-spec: + url: http://module-build.sourceforge.net/META-spec-v1.4.html + version: 1.4 diff --git a/KCLS/bs_files/perl_packages/MARC-XML-0.93.readme b/KCLS/bs_files/perl_packages/MARC-XML-0.93.readme new file mode 100644 index 0000000000..f2307072cf --- /dev/null +++ b/KCLS/bs_files/perl_packages/MARC-XML-0.93.readme @@ -0,0 +1,31 @@ +MARC-XML +-------- + +MARC-XML is an extension to the MARC-Record distribution for working with +XML data encoded using the MARC21slim XML schema from the Library of Congress. + +For more details see: http://www.loc.gov/standards/marcxml/ + +INSTALLATION + +To install this module type the following: + + perl Makefile.PL + make + make test + make install + +DEPENDENCIES + +This module requires these other modules and libraries: + +- MARC::Record +- MARC::Charset +- XML::SAX + +COPYRIGHT AND LICENCE + +Copyright (C) 2003-2009 Ed Summers and contributors + +This library is free software; you can redistribute it and/or modify +it under the same terms as Perl itself. diff --git a/KCLS/bs_files/perl_packages/MARC-XML-0.93.tar.gz b/KCLS/bs_files/perl_packages/MARC-XML-0.93.tar.gz new file mode 100644 index 0000000000..a10731ca77 Binary files /dev/null and b/KCLS/bs_files/perl_packages/MARC-XML-0.93.tar.gz differ diff --git a/KCLS/bs_files/runMetarecordRemapping.sh b/KCLS/bs_files/runMetarecordRemapping.sh new file mode 100755 index 0000000000..265c754fe2 --- /dev/null +++ b/KCLS/bs_files/runMetarecordRemapping.sh @@ -0,0 +1,79 @@ +#!/bin/bash + +# This script sets the config flags for doing a metarecord remapping only reingest and then resets them after running the metarecord remapping reingest. 
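The flag changes themselves live in before_metarecord_remapping.sql and
after_metarecord_remapping.sql, which are not part of this patch. Presumably
they toggle rows in config.internal_flag around the reingest; a rough
Perl/DBI sketch of that idea, with placeholder connection details and a
placeholder flag name, is:

    use DBI;

    # The connection details and the flag name below are placeholders; the
    # authoritative statements are in the before/after SQL files this script runs.
    my $dbh = DBI->connect('dbi:Pg:dbname=evergreen;host=localhost;port=5432',
                           'evergreen', 'secret', { RaiseError => 1, AutoCommit => 1 });

    sub set_internal_flag {
        my ($flag, $enabled) = @_;
        $dbh->do('UPDATE config.internal_flag SET enabled = ? WHERE name = ?',
                 undef, $enabled ? 't' : 'f', $flag);
    }

    set_internal_flag('ingest.example_flag', 1);   # before the remapping reingest
    # ... run the metarecord remapping reingest here ...
    set_internal_flag('ingest.example_flag', 0);   # afterward, restore normal behaviour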
+ +function timer() +{ + if [[ $# -eq 0 ]]; then + echo $(date '+%s') + else + local stime=$1 + etime=$(date '+%s') + + if [[ -z "$stime" ]]; then stime=$etime; fi + + dt=$((etime - stime)) + ds=$((dt % 60)) + dm=$(((dt / 60) % 60)) + dh=$((dt / 3600)) + printf '%d:%02d:%02d' $dh $dm $ds + fi +} + +if [ "$1" = "-h" ]; then + HNAME=$2 +else + echo Please enter hostname with -h "hostname" +fi + +if [ "$3" = "-p" ]; then + PORT=$4 +else + echo Please enter port with -p "port" +fi + +if [ "$5" = "-w" ]; then + PASSWORD=$6 +else + echo Please enter password with -w "password" +fi + +DEVTEST="0"; +# For dev testing only +if [ "$7" = "-dev" ]; then + DEVTEST="1" + EXPORTFILE=$8 +else + echo Please enter export file of ids with -dev "export file" +fi + +echo hostname is $HNAME and port is $PORT '\n' +tmr=$(timer) + +# Get the repo root directory +REPO_ROOT=$(git rev-parse --show-toplevel) + +date + +# Set config flags to only do metarecord remapping on bib record reingest. +psql -d evergreen -h $HNAME -U evergreen -f $REPO_ROOT/bs_files/import_workflow/before_metarecord_remapping.sql -p $PORT + +date + +if [ "$DEVTEST" = "0" ]; then + #run metarecord reingest on entire bib record set + perl $REPO_ROOT/bs_files/metarecord_remapping/data_update_driver.pl --dir metarecord_remapping/ --finished-dir metarecord_remapping/fin --database evergreen --host $HNAME --port $PORT --user evergreen --password $PASSWORD --max-processes 4 --rows 500 +else + #dev test environment, only run metarecord reingest on export set only + echo "in dev mode" + $REPO_ROOT/bs_files/metarecord_remapping_batcher_from_file.sh -h $HNAME -p $PORT -f $EXPORTFILE + EXPORTBATCH="$EXPORTFILE.metarecord_reingest.batches" + perl $REPO_ROOT/linking/disbatcher.pl -n 4 -f $EXPORTBATCH +fi +date + +# Set config flags to normal for bib record reingest +psql -d evergreen -h $HNAME -U evergreen -f $REPO_ROOT/bs_files/import_workflow/after_metarecord_remapping.sql -p $PORT + +date +printf 'Elapsed time: %s\n' $(timer $tmr) diff --git a/KCLS/bs_files/sample.json b/KCLS/bs_files/sample.json index 8851796d03..ac93985604 100644 --- a/KCLS/bs_files/sample.json +++ b/KCLS/bs_files/sample.json @@ -12,8 +12,8 @@ "authentication": { "username" : "admin", - "password" : "admin", - "workstation" : "BR1-PC30121", + "password" : "winter15c0ming", + "workstation" : "RE-PC30128", "type" : "Branch" } }, @@ -49,7 +49,7 @@ }, "import": { - "working_dir":"/home/kclsdev/24kcls_evergreen", + "working_dir":"/home/kclsdev/24kcls_evergreen/bs_files", "print_import":true, "print_keep":true, "print_delete":true, diff --git a/KCLS/bs_files/setup.sh b/KCLS/bs_files/setup.sh new file mode 100755 index 0000000000..e0da539de8 --- /dev/null +++ b/KCLS/bs_files/setup.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +sudo mkdir -v -p /var/KCLS_AUTH +sudo mkdir -v -p /var/KCLS_AUTH/logs/ +sudo mkdir -v -p /var/KCLS_AUTH/bibs_to_do/ +sudo mkdir -v -p /var/KCLS_AUTH/done_bibs/ +sudo mkdir -v -p /var/KCLS_AUTH/unupdated/ +sudo mkdir -v -p /var/KCLS_AUTH/unupdated_done/ +sudo mkdir -v -p /var/KCLS_AUTH/auths_to_do/ +sudo mkdir -v -p /var/KCLS_AUTH/done_auths/ +sudo mkdir -v -p /var/KCLS_AUTH/Backstage + +sudo chmod -R 777 /var/KCLS_AUTH + +cp sample.json /var/KCLS_AUTH/ + + diff --git a/KCLS/bs_files/update_driver.pl b/KCLS/bs_files/update_driver.pl new file mode 100644 index 0000000000..91b9e9906f --- /dev/null +++ b/KCLS/bs_files/update_driver.pl @@ -0,0 +1,531 @@ +#!perl -w + +=pod + +=head1 Name + +update_driver.pl + +=head1 Description + +This is the master script for running all other upgrade 
scripts. It groups the +scripts together by the first three digits of the scripts' name. Each group +is run in parallel. All children of a previous group finish before the next +group is run. This is so you can have scripts that depend on other scripts +executing first. + +=cut + +use strict; +use warnings; +use utf8; +use v5.8; + +=head1 Modules + +Modules used and what for. + +Note: to load a new module into your local system, execute + + $ perl -MCPAN -e shell + cpan> Module::Name + cpan> quit + +=over + +=item DBI + +All interactions with the database and SQL. + +=item DBD::Pg + +Specific protocol for PostGreSQL. + +=item Parallel::ForkManager + +Runs up to I number of processes in parallel. I is set by B<--max-processes>. + +=item Try::Tiny + +Use a try/catch form of statement. + +=item File::Basename + +Use basename() to strip off path and extention of a file name. + +=item Getopt::Long + +Retrieve long command line options (e.g. --xxx) and do simple validation. + +=item Term::ReadKey + +Used to hide console echoing while typing in passwords. + +=item Pod::Usage + +Used to display usage messages that are pulled from the program's POD. + +=back + +=cut + +use DBI; +use DBD::Pg; +use Parallel::ForkManager; +use Try::Tiny; +use File::Basename; +use Getopt::Long; +use Term::ReadKey; +use Pod::Usage; + +=head1 Usage + + update_driver.pl [--dir ] [--finished-dir ] + [--database ] [--host ] + [--user ] [--password ] + [--exit-on-error] [--max-processes ] + +=head1 Arguments + +=over + +=item B + +The directory containing the update scripts. It defaults to the current +directory. + +=item B + +The directory where the scripts are moved to when they finish correctly. It +defaults to a folder called 'finished'. + +=item B + +The database name. + +=item B + +The host name or IP address for the database. + +=item B + +The user name for the database. + +=item B + +The password for the database. If none is provided, it will ask for one. + +=item B + +Flag: should the script stop when it encounters an SQL error? If not, it will +continue processing. Regardless, an error file is created with the extension +'.err' that holds the error message. + +=item B + +Number: what is the maximum number of processes to run in parallel? The +default is four. + +=item B + +Display usage and exit. + +=back + +=cut + +my $dir = '.'; +my $finished_dir = 'finished'; +my $database = ''; +my $port = ''; +my $host = ''; +my $user = ''; +my $password = ''; +my $max_processes = 1; +my $help; +my $exit_on_error; + +# Use Getopt::Long to get the command line options. Use the POD section +# "Usage" if an option is entered incorrectly +GetOptions( + 'help!' => \$help, # default is false... + 'exit-on-error!' => \$exit_on_error, + 'dir=s' => \$dir, # strings... + 'finished-dir=s' => \$finished_dir, + 'database=s' => \$database, + 'port=s' => \$port, + 'host=s' => \$host, + 'user=s' => \$user, + 'password=s' => \$password, + 'max-processes=i' => \$max_processes # numeric +) or pod2usage( -verbose => 99, -sections => [ 'Usage' ], -exitval => 2 ); + +# Print the POD Usage and Arguments sections if the help flag is up +if ($help) { + pod2usage( + -verbose => 99, + -sections => [ 'Usage', 'Arguments' ], + -exitval => 1 ); +} + +=head1 Pre-loop + +Get command line options. Get password if none is supplied. Exit if B +does not exist. If B does not exist, create it. Setup database +parameters. Test database parameters to see if they connect correctly. 
If +B is set, make a callback for the parent process so it will die +if a child processes returns an error code. Remove all error files. These +would have been created by a previous run. + +=cut + +# Get password if not supplied +unless ($password) { + print "Type your password: "; + ReadMode('noecho'); # don't display characters while typing + chomp($password = ); + ReadMode(0); # back to normal + print "\n"; +} + +# Check the directories +$dir =~ s|\\|/|g; # backslashes to slashes +$dir =~ s|/$||; # remove trailing slash +$finished_dir =~ s|\\|/|; +$finished_dir =~ s|/$||; + +unless ( -d $dir ) { + die "$dir does not exist\n"; +} + +unless ( -d $finished_dir ) { + mkdir $finished_dir or die "Could not create $finished_dir\n$!\n"; +} + +# Database connect info +my $db_params = { + platform => 'Pg', # Always PostGreSQL + database => $database, + host => $host, + port => $port, # PostGres's default port + user => $user, + pw => $password +}; + +# Check that database info is correct +my $test_dbh = get_db_handle( $db_params ); +$test_dbh->disconnect; + +my $pm = Parallel::ForkManager->new($max_processes); + +# Callback that checks the exit status of the children. +# If we should exit on error, tell the parent to die. +if ($exit_on_error) { + $pm->run_on_finish( + sub { + my ($pid, $exit_code, $ident) = @_; + + if ($exit_code == 1) { + die "Child process encountered an error in the SQL\n"; + } elsif ($exit_code == 2) { + die "Child process encountered an error during rename\n"; + } elsif ($exit_code == 3) { + die "Child process encounted an error opening the script file\n"; + } + } + ); +} + +print "Removing error files...\n"; +unlink glob "$dir/*.err"; + +=head1 Main + +Slurp all files in the form 000xxx.sql, where 000 is three digits and xxx is any +text. Execute all files that start with 000 in parallel, then all the files +that start with 001, then 002, etc. All children of one group will finish +before the childern of the next group start. If any "digit" does not contain +any files, it is ignored. + +The files are assumed to be valid PostGreSQL files that are non-SELECT. The +first comment of the SQL file (--) is taken to be the description. If the SQL +executes without error the script is moved to the finished directory. 
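For illustration only, the grouping and barrier behaviour described above
boils down to a loop of this shape (the directory and file names are made up;
the real loop below also handles errors, descriptions, and timing):

    use Parallel::ForkManager;

    my $pm = Parallel::ForkManager->new(4);             # --max-processes

    foreach my $digit (0 .. 999) {
        my $prefix = sprintf '%03d', $digit;            # 000, 001, 002, ...
        my @files  = glob "updates/*$prefix*.sql";      # e.g. 000_fix_bibs.sql
        next unless @files;

        foreach my $file (@files) {
            my $pid = $pm->start and next;              # child handles one file
            # ... read the file and execute its SQL against the database ...
            $pm->finish;                                # child exits
        }

        $pm->wait_all_children;                         # barrier between groups
    }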
+ +=cut + +print "Begin executing update scripts...\n"; +my $time = time(); + +# Get a group of files to run in parallel +foreach my $digit (0..999) { + + # Group files as 000xxx.sql + my $formatted_digit = sprintf( "%03d", $digit ); + my @files = glob "$dir/*$formatted_digit*.sql"; + + # All of these processes will run in parallel, up to $max_processes + foreach my $file (@files) { + + # Forks and returns the pid for the child + my $pid = $pm->start and next; + + # Open file and get SQL statement + unless ( open (FH, '<', $file) ) { + + # Log error on failure + my $system_error = $!; + my ( $fail_fh, $error_file ) = get_error_file( $file ); + print $fail_fh "Unable to open $file for reading\n"; + print $fail_fh "$system_error\n"; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "Unable to open $file for reading\n"; + + # Signal the parent that the child could not open the file + $pm->finish(3); + } + + my $sql = ''; + my $desc = ''; + + # String SQL statement together + while () { + $sql .= $_; + + # First comment is the description + if ( $desc eq '' && /^\s*--\s*/ ) { + chomp; + s/^\s*--\s*//; # strip off dashes and leading whitespace + $desc = $_; + } + } + + close FH or warn "Could not close $file\n$!\n"; + print_time( $time ); + + # Execute the SQL + print "\t$file: $desc ($pid)\n"; + + if ( run_sql($db_params, $sql, $file, $desc ) ) { + + # Succesful finish, move script + my $base = basename $file; + my $finish_name = "$finished_dir/$base"; + + if ( rename $file, $finish_name ) { + + # Normal termination of the child process + $pm->finish; + } else { + + # Exit with error (rename) + $pm->finish(2); + } + } else { + + # Exit with error (script) + $pm->finish(1); + } + + } # foreach file + + # Wait for all the children to finish + $pm->wait_all_children; + +} # foreach digit + +print_time( $time ); + +print "Finished\n"; + +=head1 Subroutines + +=head2 get_db_handle + +Get a database handle + +=over + +=item Parameters + +B<$db_params> - reference to several DB parameters + +=item Returns + +B<$dbh> - database handle or zero + +=back + +=cut + +sub get_db_handle { + my $db_params = shift || return 0; + + my $platform = $db_params->{platform}; + my $database = $db_params->{database}; + my $host = $db_params->{host}; + my $port = $db_params->{port}; + my $user = $db_params->{user}; + my $pw = $db_params->{pw}; + + my $dsn = "dbi:$platform:dbname = $database; host = $host; port = $port"; + + my $dbh = DBI->connect( $dsn, $user, $pw, { + 'PrintError' => 1, + 'RaiseError' => 1, + 'PrintWarn' => 1, + 'AutoCommit' => 0 # Auto commit off so we can commit/rollback + }) or die "Unable to connect: " . $DBI::errstr . 
"\n"; + + return $dbh; +} + +=head2 run_sql + +Execute a non-SELECT SQL statement and capture any error output + +=over + +=item Parameters + +B<$db_params> - the DB parameters (ref to hash) + +B<$sql> - the SQL statement + +B<$file> - the file name + +B<$desc> - a description of the task (first comment) + +=item Returns + +1 = success, 0 = failure + +=item Side Effects + +Creates a file with the extension .err if there is an error executing the SQL + +=back + +=cut + +sub run_sql { + my $db_params = shift; + my $sql = shift; + my $file = shift; + my $desc = shift || 'SQL Script'; + + my $dbh = get_db_handle($db_params); + + unless (utf8::is_utf8($sql)){ + utf8::encode($sql); + } + + $dbh->do('SET statement_timeout = 0;'); + $dbh->commit; + try { + + $dbh->do($sql); + + } catch { + + # Log SQL error + my $err = $dbh->errstr; + $dbh->rollback; + my $rollback_err = $dbh->errstr; + warn "*** $file rolled back\n" unless $rollback_err; + $dbh->disconnect; + my $disconnect_err = $dbh->errstr; + my ( $fail_fh, $error_file ) = get_error_file( $file ); + + print $fail_fh "Can't execute SQL statement!\n"; + print $fail_fh "$file: $desc\n"; + print $fail_fh "$err\n"; + print $fail_fh "Rollback error: $rollback_err\n" if $rollback_err; + print $fail_fh "Disconnect error: $disconnect_err\n" if $disconnect_err; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "*** Can't execute SQL statement! $file: $desc\n"; + + return 0; + }; + + # SQL execute succeeds, return true + $dbh->commit; + $dbh->disconnect; + + return 1; + +} + +=head2 get_error_file + +Create and open an error file. Put in a timestamp. The error file name is +the file name with the extention of .err. + +=over + +=item Parameters + +B<$file> - the file name to create the error file for + +=item Returns + +An array in list context; a reference to an array in scalar context + +[0] B<$fail_fh> - the file handle of the openned error file + +[1] B<$error_file> - the error file name + +=back + +=cut + +sub get_error_file { + my $file = shift || 'unknown'; + + my ($sec, $min, $hour, $mday, $mon, $year) = (localtime(time))[0..5]; + my $timestamp = "$hour:$min:$sec " . ($mon + 1) . "-$mday-" . ($year + 1900); + my ( $basename, $dir ) = fileparse( $file, '.sql' ); + my $error_file = "$dir$basename.err"; + my $fail_fh; + + open ($fail_fh, '>', $error_file) + or die "Could not open $error_file for writing\n"; + print $fail_fh "$timestamp\n"; + my @return_data = ( $fail_fh, $error_file ); + + return wantarray ? 
@return_data : \@return_data; +} + +=head2 print_time + +Print time elapsed in hours, minutes, and seconds + +=over + +=item Parameters + +B<$start> - the start time, taken from the I function + +=item Side Effects + +Prints elasped time to the standand out + +=back + +=cut + +sub print_time { + use integer; + + my $start = shift || 0; + my $elapsed = time() - $start; + my $hours = $elapsed / (60 * 60); + my $seconds = $elapsed % 60; + my $minutes = ($elapsed - $hours * 60 * 60) / 60; + + print "Time elapsed: "; + print "$hours hours, " if $hours; + print "$minutes minutes, " if $minutes; + print "$seconds seconds\n"; +} + +__END__ diff --git a/KCLS/linking/add_ids_to_a_db_table.sh b/KCLS/linking/add_ids_to_a_db_table.sh new file mode 100755 index 0000000000..645a12981b --- /dev/null +++ b/KCLS/linking/add_ids_to_a_db_table.sh @@ -0,0 +1,40 @@ +#!/bin/sh + +# To run example +# ./add_ids_to_a_db_table.sh testsample.txt + +FILE=$1 + +hostname="evergreentest.catalystitservices.com" +dbport="5415" +dbname="evergreen" +username="evergreen" + +date +TABLE="${FILE%%.*}"; +OUTPUTFILE="$FILE.results"; +TABLE="public.$TABLE"; +echo "Adding id's to table $TABLE" + +psql -d $dbname -h $hostname -p $dbport -U $username << EOF + CREATE TABLE $TABLE ( id BIGINT); +EOF + +while read line; do +psql -d $dbname -h $hostname -p $dbport -U $username << EOF + INSERT INTO $TABLE (id) VALUES ($line); +EOF +done < $FILE; + +psql -d $dbname -h $hostname -p $dbport -U $username -o $OUTPUTFILE<< EOF + SELECT DISTINCT(id) FROM $TABLE ORDER BY id; +EOF + +sed -i '1,2d' $OUTPUTFILE; +sed -i '$d' $OUTPUTFILE; +sed -i '$d' $OUTPUTFILE; + +#psql -d $dbname -h $hostname -p $dbport -U $username << EOF +# DROP TABLE $TABLE; +#EOF + diff --git a/KCLS/linking/auth_to_auth_link_ids_from_file.sh b/KCLS/linking/auth_to_auth_link_ids_from_file.sh new file mode 100755 index 0000000000..69e6e06555 --- /dev/null +++ b/KCLS/linking/auth_to_auth_link_ids_from_file.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +# To run example +# ./auth_to_auth_link_ids_from_file.sh testsample.txt + +# Get the repo root directory +REPO_ROOT=$(git rev-parse --show-toplevel) + +FILE=$1 +while read line; do + date + echo "auth to auth linking record = $line"; + perl $REPO_ROOT/linking/authority_authority_linker.pl --record=$line +done < $FILE \ No newline at end of file diff --git a/KCLS/linking/auth_to_bib_batch_builder.pl b/KCLS/linking/auth_to_bib_batch_builder.pl new file mode 100755 index 0000000000..48d54de371 --- /dev/null +++ b/KCLS/linking/auth_to_bib_batch_builder.pl @@ -0,0 +1,207 @@ +#!/usr/bin/perl +# Author: Dan Reuther (dreuther@catalystitservices.com) +# +# This program takes in lists of bib record ids from various sources +# combines and de-dupes them and then creates batches and a runnerrrr +# file that is used as input to disbatcher.pl or throttle_disbatcher.pl +# +# Parameters +# +# --bib_file == file of bib record ids to add to batches +# +# --create_date == Date in the 'YYYY-MM-DD' form. If provided +# the program will grab all authorities created after or on +# create_date and look for bibs that are linking candidates and +# put those in the batches. +# +# --batch_size == The size of the batch files output by this program +# Default is 500 +# +# --output_dir == Full path to the directory in which to store the +# batch files +# +# --disbatch_file == File to hold the commands to be run by the disbatcher +# or throttle_disbatcher script. 
This file will be stored in output_dir +# +#---------------------------------------------------------------------- + +use strict; +use warnings; + +use OpenSRF::System; +use DBI; +use Getopt::Long; + +use Data::Dumper; +my $bib_file; +my $create_date; +my $batch_size; +my $output_dir; +my $disbatch_file; +my %options; +my $result = GetOptions( + \%options, + 'bib_file=s' => \$bib_file, + 'create_date=s' => \$create_date, + 'batch_size=i' => \$batch_size, + 'output_dir=s' => \$output_dir, + 'disbatch_file=s' => \$disbatch_file, +); + +if(!$disbatch_file) { + print "Must provide a disbatch file with --disbatch_file.\n"; + exit; +} +if(!$output_dir) { + print "Must provide an output directory with --output_dir.\n"; + exit; +} +if(!$batch_size) { + $batch_size = 500; +} + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); + +my @final_bib_list; +print "\n\n"; +# If we have a file, read it and extract the bibs +if($bib_file) { + push(@final_bib_list, fetch_bibs_from_file($bib_file)); +} + +# If we have a create_date fetch all authority records created on or after +# that date and then find bib candidates that might link to them. +if($create_date) { + push(@final_bib_list, fetch_bibs_from_auths($create_date)); +} + +print "Found " . @final_bib_list . " bib records to link.\n\n"; + +# De-dupe list +print "Deduplicating the list of bibs ...\n"; +my %dedupe_hash = map { $_, 1 } @final_bib_list; +my @unique_bib_list = keys %dedupe_hash; + +print "Final List Contains " . @unique_bib_list . " Bib Records\n"; + +create_batches($batch_size, @unique_bib_list); + +sub create_batches { + my $batch_size = shift; + + my $batch_count = 0; + my $count = 0; + + print "\nCreating batches of " . $batch_size . " bib records...\n"; + open DISBATCH_FILE, ">" . $output_dir . "/" . $disbatch_file; + foreach(@_) { + $count++; + if($count == 1) { + open OUTPUT, ">" . $output_dir ."/bibs_to_link_" . $batch_count; + print "Writing file " . $output_dir . "/bibs_to_link_" . $batch_count . "\n"; + } + print OUTPUT "$_\n"; + if($count == $batch_size) { + close OUTPUT; + print DISBATCH_FILE "./authority_control_fields.pl --file " . $output_dir . "/bibs_to_link_" . $batch_count ." --refresh\n"; + $batch_count++; + $count = 0; + } + } + if($count) { + close OUTPUT; + print DISBATCH_FILE "./authority_control_fields.pl --file " . $output_dir . "/bibs_to_link_" . $batch_count ." --refresh\n"; + } + print "\nWriting disbatch file .. " . $output_dir . "/" . $disbatch_file . "\n\n"; + close DISBATCH_FILE; +} + +# Create files with Batch number bibs in them +# When a file is finished create line in batch runner file to execute it + +# Fetches bib candidates that might link to authorities records created on or +# after create_date +sub fetch_bibs_from_auths { + my $create_date = shift; + + my $query = " + SELECT id + FROM authority.record_entry + WHERE create_date >= '" . $create_date . "'"; + + my $dbh = connect_to_db(); + $dbh->do('SET statement_timeout = 0;'); + + my $sth = $dbh->prepare($query); + $sth->execute(); + print "Found " . $sth->rows() . " authority records created after " . $create_date . "\n"; + my @auth_ids; + while (my @row = $sth->fetchrow_array()) { + push(@auth_ids, @row); + } + $sth->finish(); + + my @bibs; + foreach(@auth_ids) { + print "Finding bibs for authority record #" . $_ . "\n"; + $query = " + SELECT * + FROM authority.unlinked_bibs_to_given_auth_id(" . 
$_ .")"; + $sth = $dbh->prepare($query); + $sth->execute(); + my @bibs_from_auths = $sth->fetchrow_array(); + foreach my $row (0..@bibs_from_auths-1) { + foreach my $column (0..@{$bibs_from_auths[$row]}-1) { + push(@bibs, $bibs_from_auths[$row][$column]); + } + } + + print " Found " . @{$bibs_from_auths[0]} . " bib records\n"; + $sth->finish(); + } + $dbh->disconnect(); + print "Bib records from authority records: " . @bibs . "\n\n"; + return(@bibs); +} + +# Reads in bibs from file and returna as an array +sub fetch_bibs_from_file { + my $bib_file = shift; + my @bibs; + open FILE, "<", $bib_file or die "Can't open file " . $bib_file; + while() { + chomp; + if($_) { + push(@bibs, $_); + } + } + close FILE; + print "Bib records from file: " . @bibs . "\n\n"; + return @bibs; +} + +# Grab DB information from local settings. Return connected db handle (or die) +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' with RaiseError=>1 +} diff --git a/KCLS/linking/auth_to_bib_from_id.pl b/KCLS/linking/auth_to_bib_from_id.pl new file mode 100755 index 0000000000..71edfd8358 --- /dev/null +++ b/KCLS/linking/auth_to_bib_from_id.pl @@ -0,0 +1,68 @@ +#!/usr/bin/perl +# This script +# +# Usage: +# + +use strict; +use warnings; +use Getopt::Long; # for the options/parameters + +use DBI; +use OpenSRF::System; + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); + +my $auth_id; +my $filename = ''; +my $result = GetOptions( + "record=i" => \$auth_id, + "filename=s" => \$filename + ); + +my $dbh = connect_to_db(); + +my $query = 'SELECT authority.unlinked_bibs_to_given_auth_id(' . $auth_id . 
')'; + +my $sth = $dbh->prepare($query); +$sth->execute(); + +open OUTPUT, ">>$filename"; +my @output; +@output = $sth->fetchrow(); +foreach (@output) { + my @row = @$_; + foreach (@row) { + #print "$_\n"; + print OUTPUT "$_\n"; + } +} +close OUTPUT; + +$sth->finish(); +$dbh->disconnect(); + +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' with RaiseError=>1 +} diff --git a/KCLS/linking/auth_to_bib_link_ids_from_folder.sh b/KCLS/linking/auth_to_bib_link_ids_from_folder.sh new file mode 100755 index 0000000000..eab68c799e --- /dev/null +++ b/KCLS/linking/auth_to_bib_link_ids_from_folder.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# To run example + +FOLDER=$1 + +# Process each file in the given folder +for afile in $FOLDER/*.auth +do + date + echo "Processing $afile for bibs"; + + # Create similarly named file with .bib extension + newfile=$afile; + newfile=${newfile%.*}; + newfile=${newfile}.bib; + [ -f $newfile ] && rm $newfile; + + # Process each line of the afile + while read line; do + # Run pl script to generate bib id's + # For loop to add bib id's to new file. + perl auth_to_bib_from_id.pl --record=$line --filename=$newfile + #echo "auth = $line" >> $newfile; + done < $afile + date +done \ No newline at end of file diff --git a/KCLS/linking/auth_to_bib_linking_by_ids_batcher_from_file.sh b/KCLS/linking/auth_to_bib_linking_by_ids_batcher_from_file.sh new file mode 100755 index 0000000000..40d0bdcfc1 --- /dev/null +++ b/KCLS/linking/auth_to_bib_linking_by_ids_batcher_from_file.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +# This script is to create the commands to perform auth to bib linking on a file of bib id's. + +# To run example +# ./auth_to_bib_linking_by_ids_batcher_from_file.sh testsample.txt + +# Get the repo root directory +REPO_ROOT=$(git rev-parse --show-toplevel) + +FILE=$1 +FILEOUT="$1.out" +date +while read line; do + echo "perl $REPO_ROOT/linking/authority_control_fields.pl --start_id=$line --end_id=$line --refresh" >> $FILEOUT +done < $FILE +date \ No newline at end of file diff --git a/KCLS/linking/auth_to_bib_linking_by_ids_from_file.sh b/KCLS/linking/auth_to_bib_linking_by_ids_from_file.sh new file mode 100755 index 0000000000..706c2b6360 --- /dev/null +++ b/KCLS/linking/auth_to_bib_linking_by_ids_from_file.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# This script is to create the commands to perform auth to bib linking on a file of bib id's. 
+ +# To run example +# ./auth_to_bib_linking_by_ids_from_file.sh testsample.txt + +# Get the repo root directory +REPO_ROOT=$(git rev-parse --show-toplevel) + +FILE=$1 +date +while read line; do + perl $REPO_ROOT/linking/authority_control_fields.pl --start_id=$line --end_id=$line --refresh +done < $FILE +date \ No newline at end of file diff --git a/KCLS/linking/authority_authority_linker.pl b/KCLS/linking/authority_authority_linker.pl new file mode 100755 index 0000000000..81b1c116aa --- /dev/null +++ b/KCLS/linking/authority_authority_linker.pl @@ -0,0 +1,302 @@ +#!/usr/bin/perl + +use strict; +use warnings; +use DBI; +use Getopt::Long; +use MARC::Record; +use MARC::File::XML (BinaryEncoding => 'UTF-8'); +use MARC::Charset; +use OpenSRF::System; +use OpenILS::Utils::Fieldmapper; +use OpenSRF::Utils::SettingsClient; +use OpenSRF::EX qw/:try/; +use Encode; +use Unicode::Normalize; +use OpenILS::Application::AppUtils; +use Data::Dumper; +use Pod::Usage qw/ pod2usage /; + +MARC::Charset->assume_unicode(1); + +my $acsaf_cache = {}; + +sub get_acsaf { + my ($e, $id) = @_; + + $acsaf_cache->{$id} ||= + $e->retrieve_authority_control_set_authority_field([ + $id, + {flesh => 1, flesh_fields => {acsaf => ["main_entry"]}} + ]); + return $acsaf_cache->{$id}; +} + +# Grab DB information from local settings. Return connected db handle (or die) +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' with RaiseError=>1 +} + +# I can't believe this isn't already in a sub somewhere? We seem to repeat +# these steps all over the place, which is very much "bad code smell." 
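For readers skimming the patch, the cleanup this helper performs is typically
along these lines (a sketch, not the canonical definition; it assumes
OpenILS::Application::AppUtils supplies entityize, as elsewhere in these
scripts):

    use OpenILS::Application::AppUtils;

    # Strip the XML declaration and inter-tag whitespace from a MARCXML string,
    # then entity-encode it for storage.
    sub clean_marcxml {
        my ($xml) = @_;

        $xml =~ s/\n//sg;                # drop embedded newlines
        $xml =~ s/^<\?xml.+\?\s*>//g;    # drop the XML declaration
        $xml =~ s/>\s+</></g;            # collapse whitespace between tags

        return OpenILS::Application::AppUtils->entityize($xml);
    }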
+sub marcxml_eg { + my ($xml) = @_; # a string, not an object, to be clear + + $xml =~ s/\n//sgo; + $xml =~ s/^<\?xml.+\?\s*>//go; + $xml =~ s/>\s+entityize($xml); +} + +sub matchable_string { + my ($field, $sf_list, $joiner) = @_; + $joiner ||= ' '; + + return join($joiner, map { $field->subfield($_) } split "", $sf_list); +} + +# ########### main +my ($start_id, $end_id); +my $bootstrap = '/openils/conf/opensrf_core.xml'; +my @records; + +my %options; +my $result = GetOptions( + \%options, + 'configuration=s' => \$bootstrap, + 'record=i' => \@records, + 'all', 'help', 'debug', + 'start_id=i' => \$start_id, + 'end_id=i' => \$end_id +); + +pod2usage(0) if not $result or $options{help}; + +print "OpenSRF bootstrap and fieldmapper import...\n" if $options{debug}; +OpenSRF::System->bootstrap_client(config_file => $bootstrap); +Fieldmapper->import(IDL => OpenSRF::Utils::SettingsClient->new->config_value("IDL")); + +# must be loaded and initialized after the IDL is parsed + +print "Loading CStoreEditor ...\n" if $options{debug}; + +use OpenILS::Utils::CStoreEditor; +OpenILS::Utils::CStoreEditor::init(); + +my $e = OpenILS::Utils::CStoreEditor->new; + +my $query = q{ + SELECT + source, + ARRAY_TO_STRING(ARRAY_AGG(target || ',' || field), ';') AS links + FROM ( + SELECT sh1.record AS target, + sh2.record AS source, + sh2.atag AS field + FROM authority.simple_heading sh1 + JOIN authority.simple_heading sh2 USING (sort_value) + JOIN authority.control_set_authority_field af1 ON (sh1.atag = af1.id AND af1.main_entry IS NULL) + JOIN authority.control_set_authority_field af2 ON (sh2.atag = af2.id AND af2.main_entry IS NOT NULL AND af2.linking_subfield IS NOT NULL) + %s -- where clause here + EXCEPT SELECT target, source, field FROM authority.authority_linking + ) x GROUP BY 1 +}; + +my @bind_params; +if (@records) { + $query = sprintf($query, "WHERE sh2.record = ?"); + @bind_params = @records; # should be just one scalar in this array. +} elsif ($options{all}) { + $query = sprintf($query, ""); # no where clause +} elsif ($start_id and $end_id) { + $query = sprintf($query, "WHERE sh2.record BETWEEN ? AND ?"); + @bind_params = ($start_id, $end_id); +} else { + pod2usage(0); +} + +print "SQL, params: ", Dumper($query, \@bind_params), "\n" if $options{debug}; +my $dbh = connect_to_db; # dies if any problem +$dbh->do('SET statement_timeout = 0'); + +my $sth = $dbh->prepare($query); + +print "Executing query ...\n" if $options{debug}; +$sth->execute(@bind_params); + +my $problems = 0; +my $start_time = localtime(); +print "Start " . $start_time . " for records " . $start_id . " to " . $end_id . 
"\n"; +while (my ($src, $links) = $sth->fetchrow_array) { + print "src: $src\n" if $options{debug}; + + try { + my $src_rec = $e->retrieve_authority_record_entry($src) or + die $e->die_event; + my $src_marc = MARC::Record->new_from_xml($src_rec->marc); + + for my $link (split ';', $links) { + my ($target, $field_id) = split ',', $link; + + print "target: $target, field_id: $field_id\n" if $options{debug}; + + my $target_rec = $e->retrieve_authority_record_entry($target) or + die $e->die_event; + my $target_marc = MARC::Record->new_from_xml($target_rec->marc); + my $cni = $target_marc->field('003')->data; + + my $acsaf = get_acsaf($e, $field_id) or die $e->die_event; + + for my $field ($src_marc->field($acsaf->tag)) { + my $src_string = matchable_string( + $field, $acsaf->main_entry->display_sf_list, $acsaf->main_entry->joiner + ); + + print("at field ", $acsaf->id, " (", $acsaf->tag, + "), trying to match '$src_string'...\n") if $options{debug}; + + my ($tfield) = $target_marc->field($acsaf->main_entry->tag); + if(defined $tfield) { + my $target_string = matchable_string( + $tfield, $acsaf->main_entry->display_sf_list, $acsaf->main_entry->joiner + ); + + if ($target_string eq $src_string) { + print "got a match ...\n" if $options{debug}; + $field->update('0' => "($cni)$target"); + } + } + else { + } + } + } + + $src_rec->marc(marcxml_eg($src_marc->as_xml_record)); + $e->xact_begin; + $e->update_authority_record_entry($src_rec) or + die $e->die_event; + $e->xact_commit; + + } otherwise { + my $err = shift; + print STDERR "\nRecord # $src : ", + (ref $err eq "HASH" ? Dumper($err) : $err), "\n"; + + # Reset SAX parser so that one bad record doesn't + # kill the entire process. + + import MARC::File::XML; + $problems++; + } +} +my $end_time = localtime(); +print "----- Stop " . $end_time . " for records " . $start_id . " to " . $end_id . "\n"; + +exit ($problems > 0); + +__END__ + +=head1 NAME + +authority_authority_linker.pl - Link reference headings in authority records to main entry headings in other authority records + +=head1 SYNOPSIS + +authority_authority_linker.pl [B<--configuration>=I] +[[B<--record>=I[ B<--record>=I]]] | [B<--all>] | [B<--start_id>=I B<--end_id>=I] + +=head1 DESCRIPTION + +For a given set of records, find authority reference headings that also +appear as main entry headings in any other authority record. In the +specific MARC field of the authority record (source) containing the reference +heading with such a match in another authority record (target), add a subfield +0 (zero) referring to the target record by ID. + +=head1 OPTIONS + +=over + +=item * B<-c> I, B<--configuration>=I + +Specifies the OpenSRF configuration file used to connect to the OpenSRF router. +Defaults to F + +=item * B<-r> I, B<--record>=I + +Specifies the authority record ID (found in the C +column) of the B record to process. This option may be specified more +than once to process multiple records in a single run. + +=item * B<-a>, B<--all> + +Specifies that all authority records should be processed. For large +databases, this may take an extraordinarily long amount of time. + +=item * B<-s> I, B<--start_id>=I + +Specifies the starting ID of the range of authority records to process. +This option is ignored unless it is accompanied by the B<-e> or B<--end_id> +option. + +=item * B<-e> I, B<--end_id>=I + +Specifies the ending ID of the range of authority records to process. +This option is ignored unless it is accompanied by the B<-s> or B<--start> +option. 
+ +=back + +=head1 EXAMPLES + + authority_authority_linker.pl --start_id 1 --end_id 50000 + +Processes the authority records with IDs between 1 and 50,000 using the +default OpenSRF configuration file for connection information. + +=head1 AUTHOR + +Lebbeous Fogle-Weekley + +=head1 COPYRIGHT AND LICENSE + +Copyright (C) 2013 Equinox Software, Inc. + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA +02110-1301, USA. + +=cut diff --git a/KCLS/linking/authority_authority_linker_batcher.pl b/KCLS/linking/authority_authority_linker_batcher.pl new file mode 100755 index 0000000000..cb5c72ce27 --- /dev/null +++ b/KCLS/linking/authority_authority_linker_batcher.pl @@ -0,0 +1,118 @@ +#!/usr/bin/perl +# --------------------------------------------------------------- +# Kyle Tomita +# Created from authority_control_fields_batcher.pl +# by Jason Stephenson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# --------------------------------------------------------------- + +# Outputs command lines for running authority_authority_linker.pl over +# $batch_size records at a time. The output is printed to standard +# output and can be redirected to a file. It is suitable for use with +# disbatcher.pl (available elsewhere). + +# The default batch size is 1,000 records. You can change this value +# by specifying the --batch-size (-b) option with a numeric argument. +# For instance, to run batches of 1,000 records you could use: +# +# authority_authority_linker_batcher.pl -b 1000 + +# You can specify a lower bound. This is an integer value that the +# record retrieval will start at, so any batches will start at +# record entries with an id greater than this value. You specify this +# with the --lower-bound (-l) option: +# +# authority_authority_linker_batcher.pl --lower-bound 1380695 +# +# This option is useful if you ran some batches previously and want to +# pick up any records added since the last batch. To do this, you'd +# specify the --end_id from the last line of your previous batch as +# the lower bound. +# +# The default lower bound is 0 to run over all of your regular +# record entries. + +# Naturally, the options can be combined.
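# An illustrative combined run might look like the following (the batch file
# name here is made up; the options themselves are the ones documented above,
# and the disbatcher.pl step mirrors the usage shown in the linking readme):
#
#   ./authority_authority_linker_batcher.pl -b 1000 --lower-bound 1380695 > auth_batches
#   ./disbatcher.pl -n 3 -f auth_batches -v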
+ +use strict; +use warnings; +use DBI; +use Getopt::Long; +use OpenSRF::System; + +my $batch_size = 1000; +my $lower_bound = 0; + +my $hostname = 'evergreentest.catalystitservices.com'; +my $portnum = '5415'; + +my $result = GetOptions("lower-bound=i" => \$lower_bound, + "batch-size=i" => \$batch_size, + "hostname=s" => \$hostname, + "portnum=s" => \$portnum); + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); +my $sc = OpenSRF::Utils::SettingsClient->new; +my $platform = $sc->config_value( reporter => setup => database => 'driver' ); +my $host = $sc->config_value( reporter => setup => database => 'host' ); +my $port = $sc->config_value( reporter => setup => database => 'port' ); +my $database = $sc->config_value( reporter => setup => database => 'db' ); +if (!$database) { + $database = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $database; +} +my $user = $sc->config_value( reporter => setup => database => 'user' ); +my $pw = $sc->config_value( reporter => setup => database => 'pw' ); +my $dsn = "dbi:$platform:dbname = $database; host = $host; port = $port"; + +my $dbh = DBI->connect($dsn,$user, $pw); + +$dbh->do('SET statement_timeout = 0;'); + +my $q = < $lower_bound +AND (source IS NULL + OR source IN (1,2)) +ORDER BY id ASC +END_OF_Q + +my $ids = $dbh->selectall_arrayref($q); +my ($start, $end, $count) = (0, 0, 0); +foreach (@$ids) { + $count++; + $end = $_->[0]; + if ($count == 1) { + $start = $_->[0]; + } + if ($count == $batch_size) { + print_it($start, $end); + $count = 0; + } +} +# Catch the leftovers. +if ($count) { + print_it($start, $end); +} + +sub print_it { + my ($start, $end) = @_; + print("./authority_authority_linker.pl "); + if ($start == $end) { + printf("--record=%d\n", $start); + } else { + printf("--start_id=%d --end_id=%d\n", $start, $end); + } +} diff --git a/KCLS/linking/authority_control_fields.pl b/KCLS/linking/authority_control_fields.pl new file mode 100755 index 0000000000..542c5aea0a --- /dev/null +++ b/KCLS/linking/authority_control_fields.pl @@ -0,0 +1,807 @@ +#!/usr/bin/perl +# Copyright (C) 2010-2011 Laurentian University +# Author: Dan Scott +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# --------------------------------------------------------------- + +use strict; +use warnings; +use DBI; +use Getopt::Long; +use MARC::Record; +use MARC::File::XML (BinaryEncoding => 'UTF-8'); +use MARC::Charset; +use OpenSRF::System; +use OpenILS::Utils::Fieldmapper; +use OpenSRF::Utils::SettingsClient; +use OpenSRF::EX qw/:try/; +use Encode; +use Unicode::Normalize; +use OpenILS::Application::AppUtils; +use Data::Dumper; +use Pod::Usage qw/ pod2usage /; + +MARC::Charset->assume_unicode(1); + +my ($start_id, $end_id, $refresh); +my ($days_back); +my $input_file =''; +my $bootstrap = '/openils/conf/opensrf_core.xml'; +my @records; + +my %options; +my $result = GetOptions( + \%options, + 'configuration=s' => \$bootstrap, + 'record=i' => \@records, + 'refresh' => \$refresh, + 'all', 'help', + 'start_id=i' => \$start_id, + 'end_id=i' => \$end_id, + 'days_back=i' => \$days_back, + 'file=s' => \$input_file, +); + +if (!$result or $options{help}) { + pod2usage(0); +} + +if ($start_id && $days_back) { + print "Can't use both start ID and days back!\n"; + exit; +} + +OpenSRF::System->bootstrap_client(config_file => $bootstrap); +Fieldmapper->import(IDL => OpenSRF::Utils::SettingsClient->new->config_value("IDL")); + +# must be loaded and initialized after the IDL is parsed +use OpenILS::Utils::CStoreEditor; +OpenILS::Utils::CStoreEditor::init(); + +my $e = OpenILS::Utils::CStoreEditor->new; +my $undeleted; +if ($options{all}) { + # get a list of all non-deleted records from Evergreen + # open-ils.cstore open-ils.cstore.direct.biblio.record_entry.id_list.atomic {"deleted":"f"} + $undeleted = $e->request( + 'open-ils.cstore.direct.biblio.record_entry.id_list.atomic', + [{deleted => 'f'}, {id => { '>' => 0}}] + ); + @records = @$undeleted; +} + +if ($start_id and $end_id) { + @records = ($start_id .. $end_id); +} + +if (defined $days_back) { +@records=(); + +# Grab DB information from local settings +my $sc = OpenSRF::Utils::SettingsClient->new; +my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); +my $db_host = $sc->config_value( reporter => setup => database => 'host' ); +my $db_port = $sc->config_value( reporter => setup => database => 'port' ); +my $db_name = $sc->config_value( reporter => setup => database => 'db' ); +if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $db_name; +} +my $db_user = $sc->config_value( reporter => setup => database => 'user' ); +my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + +die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + +my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . 
$db_port; +my $dbh = DBI->connect($dsn,$db_user,$db_pw, {AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1}) or die "database connection error"; + +# SQL Used to gather a list of ID's +my $idstatement = $dbh->prepare("SELECT DISTINCT(id) AS id FROM biblio.record_entry where (date(create_date) = date(now()) or date(edit_date) = date((NOW() - '$days_back day'::interval)))"); + +# Load the list of ID's into the records array +$idstatement->execute(); + while (my $ref = $idstatement->fetchrow_hashref()) { + my $id_ref = $ref->{"id"}; # the column name in our sql query is "id" + push(@records, $id_ref); + } +} + +if($input_file) { + open FILE, "<", $input_file or die "Can't open file " . $input_file; + while() { + chomp; + if($_) { + push(@records, $_); + } + } + close FILE; +} +# print Dumper($undeleted, \@records); + +# Hash of controlled fields & subfields in bibliographic records, and their +# corresponding controlling fields & subfields in the authority record +# +# So, if the bib 650$a can be controlled by an auth 150$a, that maps to: +# 650 => { a => { 150 => 'a'}} +my %controllees = ( + 100 => { a => { 100 => 'a' }, + b => { 100 => 'b' }, + c => { 100 => 'c' }, + d => { 100 => 'd' }, + f => { 100 => 'f' }, + g => { 100 => 'g' }, + h => { 100 => 'h' }, + k => { 100 => 'k' }, + l => { 100 => 'l' }, + m => { 100 => 'm' }, + n => { 100 => 'n' }, + o => { 100 => 'o' }, + p => { 100 => 'p' }, + q => { 100 => 'q' }, + r => { 100 => 'r' }, + s => { 100 => 's' }, + t => { 100 => 't' }, + y => { 100 => 'y' }, + z => { 100 => 'z' }, + }, + 110 => { a => { 110 => 'a' }, + b => { 110 => 'b' }, + c => { 110 => 'c' }, + d => { 110 => 'd' }, + f => { 110 => 'f' }, + g => { 110 => 'g' }, + h => { 110 => 'h' }, + k => { 110 => 'k' }, + l => { 110 => 'l' }, + m => { 110 => 'm' }, + n => { 110 => 'n' }, + o => { 110 => 'o' }, + p => { 110 => 'p' }, + r => { 110 => 'r' }, + s => { 110 => 's' }, + t => { 110 => 't' }, + y => { 110 => 'y' }, + z => { 110 => 'z' }, + }, + 111 => { a => { 111 => 'a' }, + b => { 111 => 'b' }, + c => { 111 => 'c' }, + d => { 111 => 'd' }, + e => { 111 => 'e' }, + f => { 111 => 'f' }, + g => { 111 => 'g' }, + h => { 111 => 'h' }, + k => { 111 => 'k' }, + l => { 111 => 'l' }, + m => { 111 => 'm' }, + n => { 111 => 'n' }, + o => { 111 => 'o' }, + p => { 111 => 'p' }, + q => { 111 => 'q' }, + r => { 111 => 'r' }, + s => { 111 => 's' }, + t => { 111 => 't' }, + u => { 111 => 'u' }, + y => { 111 => 'y' }, + z => { 111 => 'z' }, + }, + 130 => { a => { 130 => 'a' }, + d => { 130 => 'd' }, + f => { 130 => 'f' }, + g => { 130 => 'g' }, + h => { 130 => 'h' }, + k => { 130 => 'k' }, + l => { 130 => 'l' }, + m => { 130 => 'm' }, + n => { 130 => 'n' }, + o => { 130 => 'o' }, + p => { 130 => 'p' }, + r => { 130 => 'r' }, + s => { 130 => 's' }, + t => { 130 => 't' }, + x => { 130 => 'x' }, + y => { 130 => 'y' }, + z => { 130 => 'z' }, + }, + 400 => { a => { 100 => 'a' }, + b => { 100 => 'b' }, + c => { 100 => 'c' }, + d => { 100 => 'd' }, + f => { 100 => 'f' }, + g => { 100 => 'g' }, + h => { 100 => 'h' }, + k => { 100 => 'k' }, + l => { 100 => 'l' }, + m => { 100 => 'm' }, + n => { 100 => 'n' }, + o => { 100 => 'o' }, + p => { 100 => 'p' }, + q => { 100 => 'q' }, + r => { 100 => 'r' }, + s => { 100 => 's' }, + t => { 100 => 't' }, + y => { 100 => 'y' }, + z => { 100 => 'z' }, + }, + 410 => { a => { 110 => 'a' }, + b => { 110 => 'b' }, + c => { 110 => 'c' }, + d => { 110 => 'd' }, + f => { 110 => 'f' }, + g => { 110 => 'g' }, + h => { 110 => 'h' }, + k => { 110 => 'k' }, + l => { 110 => 'l' 
}, + m => { 110 => 'm' }, + n => { 110 => 'n' }, + o => { 110 => 'o' }, + p => { 110 => 'p' }, + r => { 110 => 'r' }, + s => { 110 => 's' }, + t => { 110 => 't' }, + y => { 110 => 'y' }, + z => { 110 => 'z' }, + }, + 411 => { a => { 111 => 'a' }, + b => { 111 => 'b' }, + c => { 111 => 'c' }, + d => { 111 => 'd' }, + e => { 111 => 'e' }, + f => { 111 => 'f' }, + g => { 111 => 'g' }, + h => { 111 => 'h' }, + k => { 111 => 'k' }, + l => { 111 => 'l' }, + m => { 111 => 'm' }, + n => { 111 => 'n' }, + o => { 111 => 'o' }, + p => { 111 => 'p' }, + q => { 111 => 'q' }, + r => { 111 => 'r' }, + s => { 111 => 's' }, + t => { 111 => 't' }, + u => { 111 => 'u' }, + y => { 111 => 'y' }, + z => { 111 => 'z' }, + }, + 600 => { a => { 100 => 'a' }, + b => { 100 => 'b' }, + c => { 100 => 'c' }, + d => { 100 => 'd' }, + f => { 100 => 'f' }, + g => { 100 => 'g' }, + h => { 100 => 'h' }, + k => { 100 => 'k' }, + l => { 100 => 'l' }, + m => { 100 => 'm' }, + n => { 100 => 'n' }, + o => { 100 => 'o' }, + p => { 100 => 'p' }, + q => { 100 => 'q' }, + r => { 100 => 'r' }, + s => { 100 => 's' }, + t => { 100 => 't' }, + v => { 100 => 'v' }, + x => { 100 => 'x' }, + y => { 100 => 'y' }, + z => { 100 => 'z' }, + }, + 610 => { a => { 110 => 'a' }, + b => { 110 => 'b' }, + c => { 110 => 'c' }, + d => { 110 => 'd' }, + f => { 110 => 'f' }, + g => { 110 => 'g' }, + h => { 110 => 'h' }, + k => { 110 => 'k' }, + l => { 110 => 'l' }, + m => { 110 => 'm' }, + n => { 110 => 'n' }, + o => { 110 => 'o' }, + p => { 110 => 'p' }, + r => { 110 => 'r' }, + s => { 110 => 's' }, + t => { 110 => 't' }, + v => { 110 => 'v' }, + x => { 110 => 'x' }, + y => { 110 => 'y' }, + z => { 110 => 'z' }, + }, + 611 => { a => { 111 => 'a' }, + b => { 111 => 'b' }, + c => { 111 => 'c' }, + d => { 111 => 'd' }, + e => { 111 => 'e' }, + f => { 111 => 'f' }, + g => { 111 => 'g' }, + h => { 111 => 'h' }, + k => { 111 => 'k' }, + l => { 111 => 'l' }, + m => { 111 => 'm' }, + n => { 111 => 'n' }, + o => { 111 => 'o' }, + p => { 111 => 'p' }, + q => { 111 => 'q' }, + r => { 111 => 'r' }, + s => { 111 => 's' }, + t => { 111 => 't' }, + u => { 111 => 'u' }, + v => { 111 => 'v' }, + x => { 111 => 'x' }, + y => { 111 => 'y' }, + z => { 111 => 'z' }, + }, + 630 => { a => { 130 => 'a' }, + d => { 130 => 'd' }, + f => { 130 => 'f' }, + g => { 130 => 'g' }, + h => { 130 => 'h' }, + k => { 130 => 'k' }, + l => { 130 => 'l' }, + m => { 130 => 'm' }, + n => { 130 => 'n' }, + o => { 130 => 'o' }, + p => { 130 => 'p' }, + r => { 130 => 'r' }, + s => { 130 => 's' }, + t => { 130 => 't' }, + v => { 130 => 'v' }, + x => { 130 => 'x' }, + y => { 130 => 'y' }, + z => { 130 => 'z' }, + }, + 650 => { a => { 150 => 'a' }, + b => { 150 => 'b' }, + c => { 150 => 'c' }, + d => { 150 => 'd' }, + v => { 150 => 'v' }, + x => { 150 => 'x' }, + y => { 150 => 'y' }, + z => { 150 => 'z' }, + }, + 651 => { a => { 151 => 'a' }, + b => { 151 => 'b' }, + v => { 151 => 'v' }, + x => { 151 => 'x' }, + y => { 151 => 'y' }, + z => { 151 => 'z' }, + }, + 655 => { a => { 155 => 'a' }, + b => { 155 => 'b' }, + c => { 155 => 'c' }, + v => { 155 => 'v' }, + x => { 155 => 'x' }, + y => { 155 => 'y' }, + z => { 155 => 'z' }, + }, + 700 => { a => { 100 => 'a' }, + b => { 100 => 'b' }, + c => { 100 => 'c' }, + d => { 100 => 'd' }, + f => { 100 => 'f' }, + g => { 100 => 'g' }, + h => { 100 => 'h' }, + k => { 100 => 'k' }, + l => { 100 => 'l' }, + m => { 100 => 'm' }, + n => { 100 => 'n' }, + o => { 100 => 'o' }, + p => { 100 => 'p' }, + q => { 100 => 'q' }, + r => { 100 => 'r' }, + s => { 100 => 's' 
}, + t => { 100 => 't' }, + y => { 100 => 'y' }, + z => { 100 => 'z' }, + }, + 710 => { a => { 110 => 'a' }, + b => { 110 => 'b' }, + c => { 110 => 'c' }, + d => { 110 => 'd' }, + f => { 110 => 'f' }, + g => { 110 => 'g' }, + h => { 110 => 'h' }, + k => { 110 => 'k' }, + l => { 110 => 'l' }, + m => { 110 => 'm' }, + n => { 110 => 'n' }, + o => { 110 => 'o' }, + p => { 110 => 'p' }, + r => { 110 => 'r' }, + s => { 110 => 's' }, + t => { 110 => 't' }, + y => { 110 => 'y' }, + z => { 110 => 'z' }, + }, + 711 => { a => { 111 => 'a' }, + b => { 111 => 'b' }, + c => { 111 => 'c' }, + d => { 111 => 'd' }, + e => { 111 => 'e' }, + f => { 111 => 'f' }, + g => { 111 => 'g' }, + h => { 111 => 'h' }, + k => { 111 => 'k' }, + l => { 111 => 'l' }, + m => { 111 => 'm' }, + n => { 111 => 'n' }, + o => { 111 => 'o' }, + p => { 111 => 'p' }, + q => { 111 => 'q' }, + r => { 111 => 'r' }, + s => { 111 => 's' }, + t => { 111 => 't' }, + u => { 111 => 'u' }, + y => { 111 => 'y' }, + z => { 111 => 'z' }, + }, + 730 => { a => { 130 => 'a' }, + d => { 130 => 'd' }, + f => { 130 => 'f' }, + g => { 130 => 'g' }, + h => { 130 => 'h' }, + k => { 130 => 'k' }, + l => { 130 => 'l' }, + m => { 130 => 'm' }, + n => { 130 => 'n' }, + o => { 130 => 'o' }, + p => { 130 => 'p' }, + r => { 130 => 'r' }, + s => { 130 => 's' }, + t => { 130 => 't' }, + y => { 130 => 'y' }, + z => { 130 => 'z' }, + }, + 800 => { a => { 100 => 'a' }, + b => { 100 => 'b' }, + c => { 100 => 'c' }, + d => { 100 => 'd' }, + f => { 100 => 'f' }, + g => { 100 => 'g' }, + h => { 100 => 'h' }, + k => { 100 => 'k' }, + l => { 100 => 'l' }, + m => { 100 => 'm' }, + n => { 100 => 'n' }, + o => { 100 => 'o' }, + p => { 100 => 'p' }, + q => { 100 => 'q' }, + r => { 100 => 'r' }, + s => { 100 => 's' }, + t => { 100 => 't' }, + y => { 100 => 'y' }, + z => { 100 => 'z' }, + }, + 810 => { a => { 110 => 'a' }, + b => { 110 => 'b' }, + c => { 110 => 'c' }, + d => { 110 => 'd' }, + f => { 110 => 'f' }, + g => { 110 => 'g' }, + h => { 110 => 'h' }, + k => { 110 => 'k' }, + l => { 110 => 'l' }, + m => { 110 => 'm' }, + n => { 110 => 'n' }, + o => { 110 => 'o' }, + p => { 110 => 'p' }, + r => { 110 => 'r' }, + s => { 110 => 's' }, + t => { 110 => 't' }, + y => { 110 => 'y' }, + z => { 110 => 'z' }, + }, + 811 => { a => { 111 => 'a' }, + b => { 111 => 'b' }, + c => { 111 => 'c' }, + d => { 111 => 'd' }, + e => { 111 => 'e' }, + f => { 111 => 'f' }, + g => { 111 => 'g' }, + h => { 111 => 'h' }, + k => { 111 => 'k' }, + l => { 111 => 'l' }, + m => { 111 => 'm' }, + n => { 111 => 'n' }, + o => { 111 => 'o' }, + p => { 111 => 'p' }, + q => { 111 => 'q' }, + r => { 111 => 'r' }, + s => { 111 => 's' }, + t => { 111 => 't' }, + u => { 111 => 'u' }, + y => { 111 => 'y' }, + z => { 111 => 'z' }, + }, + 830 => { a => { 130 => 'a' }, + d => { 130 => 'd' }, + f => { 130 => 'f' }, + g => { 130 => 'g' }, + h => { 130 => 'h' }, + k => { 130 => 'k' }, + l => { 130 => 'l' }, + m => { 130 => 'm' }, + n => { 130 => 'n' }, + o => { 130 => 'o' }, + p => { 130 => 'p' }, + r => { 130 => 'r' }, + s => { 130 => 's' }, + t => { 130 => 't' }, + x => { 130 => 'x' }, + y => { 130 => 'y' }, + z => { 130 => 'z' }, + }, +); +my $start_time = localtime(); +if($input_file) { + print "Start " . $start_time . " for " . scalar(@records) . " records.\n"; +} else { + print "Start " . $start_time . " for records " . $start_id . " to " . $end_id . "\n"; +} +foreach my $rec_id (@records) { + # print "$rec_id\n"; + + # State variable; was the record changed? 
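# ($changed is set below whenever a $0 subfield is stripped in --refresh mode
#  or added after a successful authority match; the record is only written
#  back to the database when it ends up set.)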
+ my $changed = 0; + + # get the record + my $record = $e->retrieve_biblio_record_entry($rec_id); + next unless $record; + # print Dumper($record); + + try { + my $marc = MARC::Record->new_from_xml($record->marc()); + + # get the list of controlled fields + my @c_fields = keys %controllees; + + foreach my $c_tag (@c_fields) { + my @c_subfields = keys %{$controllees{"$c_tag"}}; + # print "Field: $field subfields: "; + # foreach (@subfields) { print "$_ "; } + + # Get the MARCXML from the record and check for controlled fields/subfields + my @bib_fields = ($marc->field($c_tag)); + foreach my $bib_field (@bib_fields) { + # print $_->as_formatted(); + + if ($refresh and defined(scalar($bib_field->subfield('0')))) { + $bib_field->delete_subfield(code => '0'); + $changed = 1; + } + + my %match_subfields; + my $match_tag; + my @searches; + foreach my $c_subfield (@c_subfields) { + my @sf_values = $bib_field->subfield($c_subfield); + if (@sf_values) { + # Give me the first element of the list of authority controlling tags for this subfield + # XXX Will we need to support more than one controlling tag per subfield? Probably. That + # will suck. Oh well, leave that up to Ole to implement. + $match_subfields{$c_subfield} = (keys %{$controllees{$c_tag}{$c_subfield}})[0]; + $match_tag = $match_subfields{$c_subfield}; + push @searches, map {{term => $_, subfield => $c_subfield}} @sf_values; + } + } + # print Dumper(\%match_subfields); + next if !$match_tag; + + my @tags = ($match_tag); + + # print "Controlling tag: $c_tag and match tag $match_tag\n"; + # print Dumper(\@tags, \@searches); + + # Now we've built up a complete set of matching controlled + # subfields for this particular field; let's check to see if + # we have a matching authority record + my $session = OpenSRF::AppSession->create("open-ils.search"); + my $validates = $session->request("open-ils.search.authority.validate.tag.id_list", + "tags", \@tags, "searches", \@searches + )->gather(); + $session->disconnect(); + + # print Dumper($validates); + + # Protect against failed (error condition) search request + if (!$validates) { + print STDERR "Search for matching authority failed; record # $rec_id\n"; + next if (!$changed); + } + + # Only add linking if one or more was found, but we may have changed + # the record already if in --refresh mode. + if (scalar(@$validates) > 0) { + + # Iterate through the returned authority record IDs to delete any + # matching $0 subfields already in the bib record + foreach my $auth_zero (@$validates) { + $bib_field->delete_subfield(code => '0', match => qr/\)$auth_zero$/); + } + + # Okay, we have a matching authority control; time to + # add the magical subfield 0. Use the first returned auth + # record as a match. 
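# For illustration (values made up): if this field is
#   650 _0 $a Cats
# and the first matching authority record has id 12345 with an 003 of DLC,
# the lines below append
#   $0 (DLC)12345
# so the field becomes
#   650 _0 $a Cats $0 (DLC)12345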
+ my $auth_id = @$validates[0]; + my $auth_rec = $e->retrieve_authority_record_entry($auth_id); + my $auth_marc = MARC::Record->new_from_xml($auth_rec->marc()); + if ($auth_marc->field('003')) { + my $cni = $auth_marc->field('003')->data(); + + $bib_field->add_subfields('0' => "($cni)$auth_id"); + $changed = 1; + } else { + print "Authority # $auth_id missing field '003'\n"; + next if (!$changed); + } + } + } + } + if ($changed) { + + # print $marc->as_formatted(); + my $xml = $marc->as_xml_record(); + $xml =~ s/\n//sgo; + $xml =~ s/^<\?xml.+\?\s*>//go; + $xml =~ s/>\s+entityize($xml); + + $record->marc($xml); + + my $editor = OpenILS::Utils::CStoreEditor->new(xact=>1); + if ($editor->update_biblio_record_entry($record)) { + $editor->commit(); + } else { + $editor->rollback(); + } + } + } otherwise { + my $err = shift; + print STDERR "\nRecord # $rec_id : $err\n"; + import MARC::File::XML; # reset SAX parser so that one bad record doesn't kill the entire process + } +} +my $end_time = localtime(); +if($input_file) { + print "----- Stop " . $end_time . " for " . scalar(@records) . " records.\n"; +} else { + print "----- Stop " . $end_time . " for records " . $start_id . " to " . $end_id . "\n"; +} + + +__END__ + +=head1 NAME + +authority_control_fields.pl - Controls fields in bibliographic records with authorities in Evergreen + +=head1 SYNOPSIS + +C [B<--configuration>=I] [B<--refresh>] +[[B<--record>=I[ B<--record>=I]]] | [B<--all>] | [B<--start_id>=I B<--end_id>=I] + +=head1 DESCRIPTION + +For a given set of records: + +=over + +=item * Iterate through the list of fields that are controlled fields + +=item * Iterate through the list of subfields that are controlled for +that given field + +=item * Search for a matching authority record for that combination of +field + subfield(s) + +=over + +=item * If we find a match, then add a $0 subfield to that field identifying +the controlling authority record + +=item * If we do not find a match, then insert a row into an "uncontrolled" +table identifying the record ID, field, and subfield(s) that were not controlled + +=back + +=item * Iterate through the list of floating subdivisions + +=over + +=item * If we find a match, then add a $0 subfield to that field identifying +the controlling authority record + +=item * If we do not find a match, then insert a row into an "uncontrolled" +table identifying the record ID, field, and subfield(s) that were not controlled + +=back + +=item * If we changed the record, update it in the database + +=back + +=head1 OPTIONS + +=over + +=item * B<-f>, B<--file> + +Specifies a file of bibs ids to link. + +=item * B<-c> I, B<--configuration>=I + +Specifies the OpenSRF configuration file used to connect to the OpenSRF router. +Defaults to F + +=item * B<-r> I, B<--record>=I + +Specifies the bibliographic record ID (found in the C +column) of the record to process. This option may be specified more than once +to process multiple records in a single run. + +=item * B<-a>, B<--all> + +Specifies that all bibliographic records should be processed. For large +databases, this may take an extraordinarily long amount of time. + +=item * B<-r>, B<--refresh> + +Specifies that all authority links should be removed from the target +bibliographic record(s). This will effectively rewrite all authority +linking anew. + +=item * B<-s> I, B<--start_id>=I + +Specifies the starting ID of the range of bibliographic records to process. +This option is ignored unless it is accompanied by the B<-e> or B<--end_id> +option. 
+ +=item * B<-e> I, B<--end_id>=I + +Specifies the ending ID of the range of bibliographic records to process. +This option is ignored unless it is accompanied by the B<-s> or B<--start> +option. + +=back + +=head1 EXAMPLES + + authority_control_fields.pl --start_id 1 --end_id 50000 + +Processes the bibliographic records with IDs between 1 and 50,000 using the +default OpenSRF configuration file for connection information. + +=head1 AUTHOR + +Dan Scott + +=head1 COPYRIGHT AND LICENSE + +Copyright 2010-2011 by Dan Scott + +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +=cut + diff --git a/KCLS/linking/authority_control_fields_batcher.pl b/KCLS/linking/authority_control_fields_batcher.pl index 8401904fbb..4171bfa1b6 100755 --- a/KCLS/linking/authority_control_fields_batcher.pl +++ b/KCLS/linking/authority_control_fields_batcher.pl @@ -47,14 +47,18 @@ use strict; use warnings; use DBI; -use JSONPrefs; +use Backstage::JSONPrefs; use Getopt::Long; -my $batch_size = 10; +my $batch_size = 5000; my $lower_bound = 0; +my $hostname = 'evergreentest.catalystitservices.com'; +my $portnum = '5415'; my $result = GetOptions("lower-bound=i" => \$lower_bound, - "batch-size=i" => \$batch_size); + "batch-size=i" => \$batch_size, + "hostname=s" => \$hostname, + "portnum=s" => \$portnum); #my $egdbi = JSONPrefs->load($ENV{'HOME'} . "/myprefs.d/egdbi.json"); @@ -67,17 +71,17 @@ my $dsn = "dbi:Pg:database=evergreen"; # $dsn .= ";host=" . $egdbi->host; #} -$dsn .= ";host=10.1.6.11"; +$dsn .= ";host=$hostname"; #if ($egdbi->port) { # $dsn .= ";port=" . $egdbi->port; #} -$dsn .= ";port=5432"; +$dsn .= ";port=$portnum"; #my $dbh = DBI->connect($dsn,$egdbi->user,$egdbi->password); -my $dbh = DBI->connect($dsn,"evergreen","as.oi76tqyh!a$"); +my $dbh = DBI->connect($dsn,"evergreen","evergreen"); my $q = < \$batch_size, +); + +if(!$batch_size) { + $batch_size = 500; +} + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); + +my $query = " + SELECT bib + FROM metabib.bib_export_data"; + +my $dbh = connect_to_db(); +$dbh->do('SET statement_timeout = 0;'); + +print "Querying database for IDs .. \n"; +my $sth = $dbh->prepare($query); +$sth->execute(); +print "Found " . $sth->rows() . " IDs\n"; + +my @ids; +while (my @row = $sth->fetchrow_array()) { + push(@ids, @row); +} +$sth->finish(); +$dbh->disconnect(); +system("mkdir -p /var/KCLS_AUTH/bibs_to_link"); + +my $batch_count = 0; +my $count = 0; + +open BATCH_FILE, ">/var/KCLS_AUTH/bib_link_batches"; +foreach(@ids) { + $count++; + if($count == 1) { + open OUTPUT, ">/var/KCLS_AUTH/bibs_to_link/bibs_to_link_" . $batch_count; + print "Writing file bibs_to_link_" . $batch_count . "\n"; + } + print OUTPUT "$_\n"; + if($count == $batch_size) { + close OUTPUT; + print BATCH_FILE "./authority_control_fields.pl --file /var/KCLS_AUTH/bibs_to_link/bibs_to_link_" . $batch_count ." 
--refresh\n"; + $batch_count++; + $count = 0; + } +} +if($count) { + close OUTPUT; + print BATCH_FILE "./authority_control_fields.pl --file /var/KCLS_AUTH/bibs_to_link/bibs_to_link_" . $batch_count ." --refresh\n"; +} +close BATCH_FILE; + +# Grab DB information from local settings. Return connected db handle (or die) +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' with RaiseError=>1 +} diff --git a/KCLS/linking/disbatcher.pl b/KCLS/linking/disbatcher.pl index 84f68bef09..6af320d942 100755 --- a/KCLS/linking/disbatcher.pl +++ b/KCLS/linking/disbatcher.pl @@ -19,7 +19,7 @@ use Getopt::Long; $SIG{CHLD} = \&sig_handler; -my $num = 2; +my $num = 3; my $verbose = 0; my $sleep = 0; my $file; @@ -35,6 +35,9 @@ my ($goal, $count, $running) = (0,0,0); my $fh = *STDIN; +my $time = time(); +my $now_time = localtime; + if ($file) { open($fh, "<$file") or die("Cannot open $file"); } @@ -44,9 +47,10 @@ while (<$fh>) { if ($_) { push(@commands, $_); $goal++; +# print "goal is $goal \n"; } } - +print "goal is $goal\n"; close($fh) if ($file); while ($count < $goal) { @@ -56,8 +60,26 @@ while ($count < $goal) { } else { sleep($sleep) if ($sleep); } - print "$count of $goal processed\n" if ($verbose && $count); - print "$running of $num running\n" if ($verbose && $running); +# $now_time = localtime; +# print "$count of $goal processed at " . $now_time . "\n" if ($verbose && $count); +# print "$running of $num running at " . $now_time . "\n" if ($verbose && $running); +} + +print_time( $time ); + +sub print_time { + use integer; + + my $start = shift || 0; + my $elapsed = time() - $start; + my $hours = $elapsed / (60 * 60); + my $seconds = $elapsed % 60; + my $minutes = ($elapsed - $hours * 60 * 60) / 60; + + print "Time elapsed: "; + print "$hours hours, " if $hours; + print "$minutes minutes, " if $minutes; + print "$seconds seconds\n"; } sub dispatch { @@ -67,7 +89,9 @@ sub dispatch { die("Cannot reproduce!"); } elsif ($pid) { $running++; - print("dispatched: $command\n") if ($verbose); + $now_time = localtime; + print "$count of $goal processed at " . $now_time . "\n" if ($verbose && $count); + print($now_time . 
": dispatched: $command\n") if ($verbose); } elsif ($pid == 0) { exec($command); die("exec of $command failed"); diff --git a/KCLS/linking/og_authority_control_fields_batcher.pl b/KCLS/linking/og_authority_control_fields_batcher.pl new file mode 100755 index 0000000000..c157043ad9 --- /dev/null +++ b/KCLS/linking/og_authority_control_fields_batcher.pl @@ -0,0 +1,118 @@ +#!/usr/bin/perl +# --------------------------------------------------------------- +# Copyright © 2012 Merrimack Valley Library Consortium +# Jason Stephenson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# --------------------------------------------------------------- + +# Outputs command lines for running authority_control_fields.pl over +# $batch_size numbers of bibs. The output is printed to standard +# output and can be redirected to a file. It is suitable for use with +# disbatcher.pl (available elsewhere). + +# The default batch size is 10,000 records. You can change this value +# by specifying the --batch-size (-b) option with a numeric argument. +# For instance, to run batches of 1,000 records you could use: +# +# authority_control_fields_batcher.pl -b 1000 + +# You can specify a lower bound. This is an integer value that the +# bib retrieval will start at, so any batches will start at biblio +# recorc entries with an id greater than this value. You specify this +# with the --lower-bound (-l) option: +# +# authority_control_fields_batcher.pl --lower-bound 1380695 +# +# This option is useful if you ran some batches previously and want to +# pick up any bibs added since the last batch. To do this, you'd +# specify the --end_id from the last line of your previous batch as +# the lower bound. +# +# The default lower bound is 0 to run over all of your regular biblio +# record entries. + +# Naturally, the options can be combined. + + + +use strict; +use warnings; +use DBI; +use Backstage::JSONPrefs; +use Getopt::Long; + +my $batch_size = 1000; +my $lower_bound = 0; + +my $result = GetOptions("lower-bound=i" => \$lower_bound, + "batch-size=i" => \$batch_size); + +#my $egdbi = JSONPrefs->load($ENV{'HOME'} . "/myprefs.d/egdbi.json"); + +#my $dsn = "dbi:Pg:database=" . $egdbi->database; + +#my $dsn = "dbi:Pg:database=rel_2_4_1_20130821_auth_v3"; +my $dsn = "dbi:Pg:database=evergreen"; + +#if ($egdbi->host) { +# $dsn .= ";host=" . $egdbi->host; +#} + +$dsn .= ";host=evergreentest.catalystitservices.com"; + +#if ($egdbi->port) { +# $dsn .= ";port=" . $egdbi->port; +#} + +$dsn .= ";port=5415"; + +#my $dbh = DBI->connect($dsn,$egdbi->user,$egdbi->password); + +my $dbh = DBI->connect($dsn,"evergreen","evergreen"); + +my $q = < $lower_bound +AND (source IS NULL + OR source IN (1,2)) +ORDER BY id ASC +END_OF_Q + +my $ids = $dbh->selectall_arrayref($q); +my ($start, $end, $count) = (0, 0, 0); +foreach (@$ids) { + $count++; + $end = $_->[0]; + if ($count == 1) { + $start = $_->[0]; + } + if ($count == $batch_size) { + print_it($start, $end); + $count = 0; + } +} +# Catch the leftovers. 
+if ($count) { + print_it($start, $end); +} + +sub print_it { + my ($start, $end) = @_; + print("/openils/bin/authority_control_fields.pl "); + if ($start == $end) { + printf("--record=%d\n", $start); + } else { + printf("--start_id=%d --end_id=%d\n", $start, $end); + } +} diff --git a/KCLS/linking/readme b/KCLS/linking/readme index d0a970b455..d0a332b7ed 100644 --- a/KCLS/linking/readme +++ b/KCLS/linking/readme @@ -7,7 +7,7 @@ of the batches you wish to run. Then you will run authority_control_fields_batcher.pl, which will create batches. - ./authority_control_fields_batcher.pl > batches + ./authority_control_fields_batcher.pl --batch-size=1000 --hostname=evergreentest.catalystitservices.com --port=5415 > bib_to_auth_batches_20140623 Then you will run disbatcher.pl, which will run through the batches. @@ -16,7 +16,14 @@ Then you will run disbatcher.pl, which will run through the batches. The options are -n for number to run, -f is the batch file you created with authority_control_fields_batcher.pl -v is for verbose + -n is the number of processes to run The authority and bib records should now be linked. Enjoy. + + +======================== authority to authority linking ======================== +./authority_authority_linker_batcher.pl --batch-size=1000 --hostname=evergreentest.catalystitservices.com --port=5415 > auth_to_auth_batches_20140623 + +./disbatcher.pl -n 4 -f /home/kclsdev/24kcls_evergreen/linking/auth_to_auth_batches_20140627 \ No newline at end of file diff --git a/KCLS/linking/set_export_date_on_bibs.pl b/KCLS/linking/set_export_date_on_bibs.pl new file mode 100755 index 0000000000..7b1392f1ad --- /dev/null +++ b/KCLS/linking/set_export_date_on_bibs.pl @@ -0,0 +1,80 @@ +#!/usr/bin/perl + +use strict; +use warnings; + +use OpenSRF::System; +use DBI; +use Getopt::Long; +use Data::Dumper; + +my $file; +my $date; +my %options; +my $result = GetOptions( + \%options, + 'file=s' => \$file, + 'date=s' => \$date, +); + +if(!$file) { + print "Must specify file of bib ids.\n"; + exit; +} + if(!$date) { + print "Must specify an export date. \n"; + exit; +} + +my $config = '/openils/conf/opensrf_core.xml'; +OpenSRF::System->bootstrap_client( config_file => $config ); + +my @records; +open FILE, "<", $file or die "Can't open file " . $file; +print "Reading file " . $file . "\n"; +while() { + chomp; + if($_) { + push(@records, $_); + } +} +close FILE; + + +my $dbh = connect_to_db(); +print "Updating rows.\n"; +foreach(@records) { + my $query = "SELECT * FROM metabib.set_export_date(" . $_ . ", '" . $date . "')"; + my $sth = $dbh->prepare($query); + $sth->execute(); + $sth->finish(); + } + +$dbh->disconnect(); + +print "Updated " . scalar(@records) . " rows.\n"; + +# Grab DB information from local settings. Return connected db handle (or die) +sub connect_to_db { + my $sc = OpenSRF::Utils::SettingsClient->new; + my $db_driver = $sc->config_value( reporter => setup => database => 'driver' ); + my $db_host = $sc->config_value( reporter => setup => database => 'host' ); + my $db_port = $sc->config_value( reporter => setup => database => 'port' ); + my $db_name = $sc->config_value( reporter => setup => database => 'db' ); + if (!$db_name) { + $db_name = $sc->config_value( reporter => setup => database => 'name' ); + print STDERR "WARN: is a deprecated setting for database name. For future compatibility, you should use instead." 
if $db_name; + } + my $db_user = $sc->config_value( reporter => setup => database => 'user' ); + my $db_pw = $sc->config_value( reporter => setup => database => 'pw' ); + + die "Unable to retrieve database connection information from the settings server" unless ($db_driver && $db_host && $db_port && $db_name && $db_user); + + my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + + return DBI->connect( + $dsn,$db_user,$db_pw, { + AutoCommit => 1, pg_enable_utf8 => 1, RaiseError => 1 + } + ); # shouldn't need 'or die...' with RaiseError=>1 +} diff --git a/KCLS/linking/throttle_disbatcher.pl b/KCLS/linking/throttle_disbatcher.pl new file mode 100755 index 0000000000..13e6a380b4 --- /dev/null +++ b/KCLS/linking/throttle_disbatcher.pl @@ -0,0 +1,224 @@ +#!/usr/bin/perl +# throttle_disbatcher.pl is build from dispatcher.pl +# disbatcher.pl license follows + +# Copyright © 2012 Jason J.A. Stephenson +# +# disbatcher.pl is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# disbatcher.pl is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with disbatcher.pl. If not, see +# . + +# throttle_disbatcher.pl adds the following functionality to disbatcher.pl +# This program will batch processes at two rates depending on the time of day. +# Runs num processes during normal time +# Runs max_num processes during throttle up times. +# Throttle up time is defined by start_time and end_time. Both of these are hour numbers +# 0 - 23. If start_time is greater than end_time the throttle up period is from start_time until +# end_time the next day. Otherwise throttle up period is between start_time and end_time of that day. + +use Getopt::Long; +use Time::localtime; + +$SIG{CHLD} = \&sig_handler; + +my $num = 3; +my $verbose = 0; +my $sleep = 0; +my $file; + +my $start_throttle_up_time = 1; +my $end_throttle_up_time = 1; +my $max_proc = $num; +my $throttle_num; + +my $result = GetOptions("verbose" => \$verbose, + "num=i" => \$num, + "file=s" => \$file, + "sleep=i" => \$sleep, + "start_time=i" => \$start_throttle_up_time, + "end_time=i" => \$end_throttle_up_time, + "max_num=i" => \$throttle_num); + +if(!$throttle_num) { + $throttle_num = $num; +} + +my @commands = (); + +my ($goal, $count, $running) = (0,0,0); + +my $fh = *STDIN; + +my $time = time(); +my $now_time = localtime; + +if ($file) { + open($fh, "<$file") or die("Cannot open $file"); +} + +while (<$fh>) { + chomp; + if ($_) { + push(@commands, $_); + $goal++; +# print "goal is $goal \n"; + } +} +print "goal is $goal\n"; +close($fh) if ($file); + +while ($count < $goal) { + $now_time = localtime; + # Should we throttle up? + if(is_throttled_up($now_time->hour(), $start_throttle_up_time, $end_throttle_up_time)) { + $max_proc = $throttle_num; + } else { + $max_proc = $num; + } + if (scalar(@commands) && $running < $max_proc) { + my $command = shift(@commands); + dispatch($command); + } else { + sleep($sleep) if ($sleep); + } +# $now_time = localtime; +# print "$count of $goal processed at " . $now_time . "\n" if ($verbose && $count); +# print "$running of $num running at " . 
$now_time . "\n" if ($verbose && $running); +} + +print_time( $time ); + +# Breaks a day into normal periods and throttled up periods. If start is greater than end, throttle up +# period is from start to end of the day and then from start of next day to end. Otherwise throllte up period +# is between start and end of that day. Start and end are hour numbers 0-23 +sub is_throttled_up { + my $hour = shift; + my $start = shift; + my $end = shift; + + if($start < $end) { + if($hour >= $start && $hour < $end) { + return 1; + } + } else { + if($hour >= $start || $hour < $end) { + return 1; + } + } + + return 0; +} + +sub print_time { + use integer; + + my $start = shift || 0; + my $elapsed = time() - $start; + my $hours = $elapsed / (60 * 60); + my $seconds = $elapsed % 60; + my $minutes = ($elapsed - $hours * 60 * 60) / 60; + + print "Time elapsed: "; + print "$hours hours, " if $hours; + print "$minutes minutes, " if $minutes; + print "$seconds seconds\n"; +} + +sub dispatch { + my $command = shift; + my $pid = fork(); + if (!defined($pid)) { + die("Cannot reproduce!"); + } elsif ($pid) { + $running++; + $now_time = localtime; + print "$count of $goal processed at " . $now_time . "\n" if ($verbose && $count); + print($now_time . ": dispatched: $command\n") if ($verbose); + print $running . " processes running\n" if ($verbose); + } elsif ($pid == 0) { + exec($command); + die("exec of $command failed"); + } +} + +sub sig_handler { + $running--; + $count++; +} + +__END__ + +=head1 NAME + +disbatcher.pl - Dispatches and batches a list of commands + +=head1 SYNOPSIS + +C [B<--verbose>] [B<--file>=I] [B<--num>=I] +[B<--sleep>=I] + +=head1 DESCRIPTION + +For a given list of commands stored in a I or passed in via +standard input, B reads the command list into an array +and then batches them, running I of them simultaneously. As each +command finishes, the next command is started. The program will +maintain I commands running until the command list is exhausted. +At which point, it will simply wait until the remaining commands +finish running. + +For the sake of efficiency, you can tell the program to I once +it has hit I running processes. This will cause the loop to +temporarily stop iterating until either the sleep expires or one of +the running processes finishes. Sleeping will improve, not degrade, +performance, since we use signals to determine when to start a new +process. + +If you tell the program to be I, it will periodically output +the number of running processes, the number of finished processes, and +the command line of each process as it is started. + +=head1 EXAMPLES + +Not today, maybe later. + +=head1 BUGS + +This is some simple, yet powerful, code. It makes a very nice footgun +if you are not paying attention with your options. You can easily +fork bomb your system if you set the value of the I argument too +high. You are expected to know what you are doing, and if you don't, +then don't use this software until you do know. + +=head1 AUTHOR + +Jason Stephenson + +=head1 COPYRIGHT AND LICENSE + +Copyright © 2012 Jason J.A. Stephenson + +disbatcher.pl is free software: you can redistribute it and/or +modify it under the terms of the GNU General Public License as +published by the Free Software Foundation, either version 3 of the +License, or (at your option) any later version. + +disbatcher.pl is distributed in the hope that it will be useful, but +WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +General Public License for more details. + + +=cut + + diff --git a/KCLS/openils/var/templates_kcls/opac/advanced.tt2 b/KCLS/openils/var/templates_kcls/opac/advanced.tt2 index f6e06038a5..6f8514bdcd 100644 --- a/KCLS/openils/var/templates_kcls/opac/advanced.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/advanced.tt2 @@ -2,16 +2,18 @@ WRAPPER "opac/parts/base.tt2"; INCLUDE "opac/parts/topnav.tt2"; ctx.page_title = l("Advanced Search"); - pane = CGI.param("pane") || "advanced"; + pane = CGI.param("pane") || "advanced"; + sort = CGI.param("sort") || "pubdate.descending"; + loc = ctx.search_ou; -%]
- [% INCLUDE "opac/parts/printnav.tt2" %]
- - - + + + +
@@ -24,6 +26,8 @@ [% INCLUDE "opac/parts/advanced/numeric.tt2" %] [% ELSIF pane == 'expert' %] [% INCLUDE "opac/parts/advanced/expert.tt2" %] + [% ELSIF pane == 'browse' %] + [% INCLUDE "opac/parts/advanced/browse.tt2" %] [% END %]
diff --git a/KCLS/openils/var/templates_kcls/opac/browse_items.tt2 b/KCLS/openils/var/templates_kcls/opac/browse_items.tt2 new file mode 100644 index 0000000000..5d1ebaf991 --- /dev/null +++ b/KCLS/openils/var/templates_kcls/opac/browse_items.tt2 @@ -0,0 +1,89 @@ +[% PROCESS "opac/parts/header.tt2"; + PROCESS "opac/parts/misc_util.tt2"; + WRAPPER "opac/parts/base.tt2"; + INCLUDE "opac/parts/topnav.tt2"; + + IF is_advanced || is_special; + ctx.page_title = l("Browse Results"); + ELSE; + ctx.page_title = l("Browse Results: ") _ CGI.param('query') | html; + END; + + page = ctx.search_page; + page = page.match('^\d+$') ? page : 0; # verify page is a sane value + + page_count = ctx.page_size == 0 ? 1 : POSIX.ceil(ctx.hit_count / ctx.page_size); +%] + + + + + +
+
+ [% INCLUDE "opac/parts/searchbar_browse.tt2" %] + [% INCLUDE "opac/parts/browse_set_nav.tt2" %] +
+
[% #end of search-header %] +
+
+
+ + [% IF ctx.mylist.size %] +
+ [%- IF ctx.user; %] + [% l('View My List') %] + [%- ELSE %] + [% l('View My List') %] + [%- END %] +
+ [% END %] + + +
+ + [% INCLUDE "opac/parts/browse_filtersort.tt2" value=CGI.param('sort') %] +
+
+
+ + +
+ + + + +
+
+
+
+
+
+
+
+ + [% IF ctx.display_groupings %] + [% path = "opac/parts/result/" _ + ( ctx.records.size ? "table_grouped.tt2" : "lowhits.tt2" ); + INCLUDE $path isBrowse='true' %] + [% ELSE %] + [% path = "opac/parts/result/" _ + ( ctx.records.size ? "table.tt2" : "lowhits_browse.tt2" ); + INCLUDE $path isBrowse='true' %] + [% END %] +
+
+
+
+
+[% END %] diff --git a/KCLS/openils/var/templates_kcls/opac/browse_results.tt2 b/KCLS/openils/var/templates_kcls/opac/browse_results.tt2 new file mode 100644 index 0000000000..48b40d6ff2 --- /dev/null +++ b/KCLS/openils/var/templates_kcls/opac/browse_results.tt2 @@ -0,0 +1,365 @@ +[%- # This is the bib and authority combined record browser. + + PROCESS "opac/parts/header.tt2"; + PROCESS "opac/parts/misc_util.tt2"; + WRAPPER "opac/parts/base.tt2"; + INCLUDE "opac/parts/topnav.tt2"; + + ctx.page_title = l("Browse the Catalog"); + blimit = CGI.param('blimit') || ctx.opac_hits_per_page || 11; + sort = CGI.param('sort') || "pubdate.descending"; + + depart_list = ['blimit', 'bterm', 'bpivot']; +%] + + + +[% IF ctx.is_staff %] + + +[% END %] + + + + +
+ [% INCLUDE "opac/parts/searchbar_browse.tt2" %] +
+ +
+
+ [% # Main browse search area, includes paging and search results %] +
+ [% # Code for defining "browse_pager", so it may be called twice below %] + [% BLOCK browse_pager %] + [% current_qtype = CGI.param('qtype'); %] + [% SWITCH current_qtype %] + [% CASE "title" %] + [% formatted_qtype = "Title"; %] + [% CASE "author" %] + [% formatted_qtype = "Author"; %] + [% CASE "subject" %] + [% formatted_qtype = "Subject"; %] + [% CASE "series" %] + [% formatted_qtype = "Series Title"; %] + [% CASE "id|bibcn" %] + [% formatted_qtype = "Bib Call Number"; %] + [% END %] +
+
+ Browsing [% formatted_qtype %]s that begin with [% CGI.param('bterm') %] +
+
+
+ [% IF ctx.back_pivot %] + + ← [%l ('Prev') %] [% blimit %] [% formatted_qtype %]s + + [% END %] +
+ + [% IF ctx.pager_shortcuts; %] +
+ + [% FOR shortcut IN ctx.pager_shortcuts %] + + [% shortcut.1 %] + + [% END %] + +
+ [% END %] + +
+ [% IF ctx.forward_pivot %] + + [%l ('Next') %] [% blimit %] [% formatted_qtype %]s → + + [% END %] +
+ +
+ +
+
[% # end of browse-pager-items %] +
+ [% END # end of browse_pager code %] + + [% # Top "browse_pager", (Navigation) %] + [% PROCESS browse_pager id=0 %] + +
+ [% IF ctx.browse_error %] + + [% l("An error occurred browsing records. Please try again in a moment or report the issue to library staff.") %] + + [% ELSE %] + [% # NOTE: If statement to handle filing indicator %] + [% IF ctx.browse_leading_article_warning %] +
+ [% l("Your browse term seems to begin with an article (a, an, the). You might get better results by omitting the article.") %] + [% IF ctx.browse_leading_article_alternative %] +

+ [% alternative_link = BLOCK %] + "[% ctx.browse_leading_article_alternative | html %]" + [%- END; # alternative_link BLOCK + l("Did you mean [_1]?", alternative_link); + END # IF %] +

+
+ [% END %] + +
    + [% counter = 1; %] + [% FOR result IN ctx.browse_results %] + + [% # The following IF statement determines the background color of the result item %] + [% IF (counter % 2) != 0 %] +
  1. + [% ELSE %] +
  2. + [% END %] + [% counter = counter + 1; %] + + + [% IF result.sources > 0 %] [% # bib-linked browse value %] + [% IF ctx.is_staff %] + + [% result.value %] + + [% ELSE %] + + [% result.value %] + + [% END %] + + ([% + IF result.accurate == 'f'; + l("At least"); " "; + END; + result.sources %]) + [% ELSE %] [% # only authority links %] + [% result.value %] + [% END %] + +
    [% result.history_reference | html %]
    + [% FOR note IN result.notes %] +
    + + [% l("Note:") %] + + + [% FOR piece IN note; piece | html; END %] + +
    + [% END %] +
    [% result.complex_see_also | html %]
    + +
      + [% # Code for See Functionality %] + [% seenit = {}; # for headings we've rendered + FOR a IN result.sees; + PROCESS authority_notes authority=a; + + # Other than displaying public general notes, we can go no further sans control_set. + NEXT UNLESS a.control_set; + + # get_authority_fields is fast and cache-y. + acs = ctx.get_authority_fields(a.control_set); + FOR field_group IN a.headings; + field_id = field_group.keys.0; + field = acs.$field_id; + headings = field_group.values.0; + FOR h IN headings; + # We could display headings without links here when h.target is + # undef, if we wanted to, but note that h.target_count is only defined when h.target is. + + IF h.target AND h.target_count AND result.list_authorities.grep('^' _ h.target _ '$').size == 0; + id = h.target; NEXT IF seenit.$id; seenit.$id = 1; %] + [% target = h.target; %] + [% IF result.ref_headings.$target.show == 1; %] +
    • [% result.ref_headings.$target.display %] + + [% h.heading | html %] + + ([% h.target_count %]) +
      + [% FOR note IN result.ref_headings.$target.notes %] +
      + + [% l("Note:") %] + + + [% FOR piece IN note; piece | html; END %] + +
      + [% END %] +
      +
    • + [% END %] + [% END %] + [% END %] + [% END %] + [% END %] + [% # End of Code for See Functionality %] + [% # Code for See Also Functionality %] + [% FOR a IN result.authorities; + PROCESS authority_notes authority=a IF !sees.grep(a.id); + + # Other than displaying public general notes, we can go no further sans control_set. + NEXT UNLESS a.control_set; + + # get_authority_fields is fast and cache-y. + acs = ctx.get_authority_fields(a.control_set); + FOR field_group IN a.headings; + field_id = field_group.keys.0; + field = acs.$field_id; + headings = field_group.values.0; + FOR h IN headings; + # We could display headings without links here when h.target is + # undef, if we wanted to, but note that h.target_count is only defined when h.target is. + + IF h.target AND h.target_count AND result.list_sees.grep('^' _ h.target _ '$').size == 0 AND !h.main_entry; + id = h.target; NEXT IF seenit.$id; seenit.$id = 1; %] + [% target = h.target; %] + [% IF result.ref_headings.$target.show == 1; %] +
    • + [% result.ref_headings.$target.display %] + [% # See Also link generated here %] + + [% h.heading | html %] + + ([% h.target_count %]) +
      + [% FOR note IN result.ref_headings.$target.notes %] +
      + + [% l("Note:") %] + + + [% FOR piece IN note; piece | html; END %] + +
      + [% END %] +
      +
    • + [% END %] + [% END %] + [% END %] + [% END %] + [% END %] + [% # End of Code for See Also Functionality %] +
    +
  3. + [% END %] +
[% # End of browse-result-list %] + [% END %] +
+ + [% # Bottom "browse_pager", (Navigation) %] + [% PROCESS browse_pager id=1 %] +
+ +
+
+ [% # Display a link that runs a "contains phrase" search for the same bib call number browse term. %] + [% IF current_qtype == "id|bibcn" %] + + [% END %] +
+
+ + [% BLOCK authority_notes; + # Displays public general notes (sometimes called "scope notes" ?) + FOR note IN authority.notes %] +
+ + [% l("Note:") %] + + + [% FOR piece IN note; piece | html; END %] + +
+ [% END; + END; # end of BLOCK authority_notes %] + +[% END %] diff --git a/KCLS/openils/var/templates_kcls/opac/parts/advanced/browse.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/advanced/browse.tt2 new file mode 100644 index 0000000000..b85acd01cb --- /dev/null +++ b/KCLS/openils/var/templates_kcls/opac/parts/advanced/browse.tt2 @@ -0,0 +1,27 @@ +[%- + # This page is loaded from advanced.tt2 + + blimit = CGI.param('blimit') || ctx.opac_hits_per_page || 11; + sort = CGI.param("sort") || "pubdate.descending"; +%] + +[% # browse search form %] +
+
[% l("Browse Search") %]
+ + + [% control_qtype = INCLUDE "opac/parts/qtype_selector.tt2" + id="browse-search-class" browse_only=1 plural=1 %] + [% control_bterm = BLOCK %][% END %] + [% PROCESS "opac/parts/org_selector.tt2"; %] + [% control_locg = INCLUDE build_org_selector id='browse-context' + show_loc_groups=1 + arialabel=l('Select holding library') %] + [% l('Browse for [_1] that begin with [_2] in [_3]', control_qtype, control_bterm, control_locg) %] + + + + [% # Sets the browse search term field as the default on browse search page load. %] + +
diff --git a/KCLS/openils/var/templates_kcls/opac/parts/base.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/base.tt2 index 68b8ffa5e5..bf3e0bdf66 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/base.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/base.tt2 @@ -16,7 +16,6 @@ [% INCLUDE 'opac/parts/js.tt2' %] [% content %] - [% INCLUDE 'opac/parts/footer.tt2' %] [%# INCLUDE 'opac/parts/chilifresh.tt2' %] diff --git a/KCLS/openils/var/templates_kcls/opac/parts/browse_filtersort.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/browse_filtersort.tt2 new file mode 100644 index 0000000000..932aef4902 --- /dev/null +++ b/KCLS/openils/var/templates_kcls/opac/parts/browse_filtersort.tt2 @@ -0,0 +1,16 @@ + + + diff --git a/KCLS/openils/var/templates_kcls/opac/parts/browse_set_nav.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/browse_set_nav.tt2 new file mode 100644 index 0000000000..edf4b72469 --- /dev/null +++ b/KCLS/openils/var/templates_kcls/opac/parts/browse_set_nav.tt2 @@ -0,0 +1,14 @@ + [% # Browse set navigation %] + + +
+ + + + | + Loading Navigation... + | + + + +
\ No newline at end of file diff --git a/KCLS/openils/var/templates_kcls/opac/parts/js.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/js.tt2 index 9fd52aa3ae..5067acddcc 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/js.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/js.tt2 @@ -44,7 +44,11 @@ "[% ctx.next_rec_url || '' %]", "[% mkurl(ctx.first_search_record, {page => 0}) %]", "[% mkurl(ctx.opac_root _ '/results', {find_last => 1, page => POSIX.floor((ctx.hit_count - 1) / ctx.page_size)}) %]", - "[% mkurl(ctx.opac_root _ '/results', {}, ['expand','cnoffset']) %]" + [% IF CGI.param('bterm') %] + "[% mkurl(ctx.opac_root _ '/browse_items', {}, ['expand','cnoffset']) %]" + [% ELSE %] + "[% mkurl(ctx.opac_root _ '/results', {}, ['expand','cnoffset']) %]" + [% END %] ); [% END %] diff --git a/KCLS/openils/var/templates_kcls/opac/parts/printnav.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/printnav.tt2 index 13a8f666f5..54e12b281f 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/printnav.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/printnav.tt2 @@ -1,25 +1 @@ -
-
-
- -
- [% l('Back') %] - [% l('Homepage') %] - [% l('Print Page') %] - [% l('Help') %] - [% l('Forward') %] -
- -
-
-
+[% # Purposely left blank, since public search is taken care of by BiblioCommons %] \ No newline at end of file diff --git a/KCLS/openils/var/templates_kcls/opac/parts/qtype_selector.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/qtype_selector.tt2 index 6e2b99ff73..9fbb7b621a 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/qtype_selector.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/qtype_selector.tt2 @@ -1,18 +1,26 @@ [% query_types = [ {value => "keyword", label => l("Keyword")}, - {value => "title", label => l("Title")}, - {value => "author", label => l("Author")}, - {value => "subject", label => l("Subject")}, - {value => "series", label => l("Series")}, - {value => "id|bibcn", label => l("Bib Call Number")} + {value => "title", label => l("Title"), plural_label => l("Titles"), browse => 1}, + {value => "author", label => l("Author"), plural_label => l("Authors (Last, First)"), browse => 1}, + {value => "subject", label => l("Subject"), plural_label => l("Subjects"), browse => 1}, + {value => "series", label => l("Series"), plural_label => l("Series Titles"), browse => 1}, + {value => "id|bibcn", label => l("Bib Call Number"), plural_label => l("Bib Call Numbers"), browse => 1} {value => "identifier", label => l("ISBN/ISSN/Other")} {value => "keyword|publisher", label => l("Publisher")} ] %] - %] + diff --git a/KCLS/openils/var/templates_kcls/opac/parts/record/authors.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/record/authors.tt2 index 7304aeb7af..088631a653 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/record/authors.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/record/authors.tt2 @@ -26,13 +26,17 @@ authors = [ BLOCK build_author_links; FOR node IN ctx.marc_xml.findnodes(xpath); + author_content = []; FOR subfield IN node.childNodes; NEXT UNLESS subfield.nodeName == "subfield"; code = subfield.getAttribute('code'); NEXT UNLESS code.match('[a-z]'); term = subfield.textContent | html; - '
' _ term _ ''; + author_content.push(term); END; + author_content_term_display = author_content.join(" "); + author_content_term = author_content_term_display | replace('[,\.:;]', ''); + '
' _ author_content_term_display _ ''; END; END; %] diff --git a/KCLS/openils/var/templates_kcls/opac/parts/record/body.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/record/body.tt2 index d685d9212e..84a463ab0a 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/record/body.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/record/body.tt2 @@ -5,10 +5,20 @@ %]
+ [% + # page_type records whether this page was reached from a browse search ('bterm' present) or a regular results page, and picks the matching results path. + page_type = '/results'; + IF (CGI.param('bterm')); + page_type = '/browse_items'; + END; + %] + + [% IF ctx.search_result_index >= 0 %]
- [% l('◄ Search Results') %] + [% IF (ctx.hit_count > 1) %] + [% l('◄ Search Results') %] + [% END %] [% l('Showing Item [_1] of [_2]', ctx.search_result_index + 1, ctx.hit_count) %] diff --git a/KCLS/openils/var/templates_kcls/opac/parts/record/extras.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/record/extras.tt2 index 5f91f5c0dd..ebc94022a5 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/record/extras.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/record/extras.tt2 @@ -46,7 +46,7 @@ href = mkurl(ctx.opac_root _ '/record/' _ ctx.bre_id, {}, ['expand']); img_url = ctx.media_prefix _ '/images/kcls_rdetail_arrow_down.png'; ELSE; - href = mkurl(ctx.opac_root _ '/record/' _ ctx.bre_id, {expand => name}) _ '#' _ name; + href = mkurl(ctx.opac_root _ '/record/' _ ctx.bre_id, {expand => name}) _ '#' _ name; img_url = ctx.media_prefix _ '/images/kcls_rdetail_arrow.png'; END; %] diff --git a/KCLS/openils/var/templates_kcls/opac/parts/result/lowhits_browse.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/result/lowhits_browse.tt2 new file mode 100644 index 0000000000..2af9b2c605 --- /dev/null +++ b/KCLS/openils/var/templates_kcls/opac/parts/result/lowhits_browse.tt2 @@ -0,0 +1,88 @@ +
+
+
+

[% l('Sorry, no items were available.') %]

+

[% l('Please uncheck the "Limit to available items" checkbox, navigate to a different browse set, or perform another "begins with" search.') %]

+ + + + + +
+ [% l('Other Possibilities:') %] +
+
+
+ [% INCLUDE "opac/parts/result/lowhits_purchase.tt2" %] +

+ Keyword Search Tips
+ Try changing to Advanced Search. +

+

+ Adjacency
+ Multiple words are not searched together as a phrase. They will + be found in various parts of the record. To search for a phrase, enclose your + search terms in quotation marks.
+ (example: "garcia marquez") +

+

+ Truncation
+ Words may be right-hand truncated using an asterisk. Use a single asterisk * + to truncate any number of characters.
+ (example: environment* agency) +

+

+ Anchored Searching
+ You may use ^ and $ to indicate "phrase begins with" and + "phrase ends with," respectively, within a search phrase + enclosed in quotation marks.
+ (examples: "^harry" for phrases that begin with + the term harry. + "stone$" for phrases that end in stone.) +

+
+
+
+
+
[% l("Few hits were returned for your search.") %]
+
[% l("Zero hits were returned for your search.") %]
+
+ + +
+ [% l("Maybe you meant:") %] + +
+ +
+ [% l("You will find more hits when searching all item formats:") %] + [% l("Search again with all formats?") %] +
+ +
+ [% l("You may also like to try these related searches:") %] +
+ +
+
+ +
+ [% l("You may also wish to expand your search range to:") %] + +
+ +
+ [% l("You can try searching the same terms by:") %] + [% l("title") %] + [% l("author") %] + [% l("subject") %] + [% l("series") %] + [% l("keyword") %] +
+
+
diff --git a/KCLS/openils/var/templates_kcls/opac/parts/result/paginate.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/result/paginate.tt2 index 75a3f183f4..75b73cfcc2 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/result/paginate.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/result/paginate.tt2 @@ -1,4 +1,10 @@ [% BLOCK results_count_header %] +[% IF browse == 'true'; + direct = "/browse_items"; + ELSIF seeAlso == 'true'; + direct = "/see_also"; + ELSE; direct = "/results"; END; +%]
@@ -17,7 +23,7 @@ [% class = 'search_page_nav_link'; href = '#'; IF page > 0; - href = mkurl(ctx.opac_root _ '/results', {page => page - 1}); + href = mkurl(ctx.opac_root _ direct, {page => page - 1}); ELSE; class = class _ ' invisible'; END; %] [% pageitr + 1%] [% ELSE %] - [% pageitr + 1%] + [% pageitr + 1%] [% END; added = added + 1; IF added == 8; LAST; END; @@ -45,7 +51,7 @@ [% class = 'search_page_nav_link'; href = '#'; IF (page + 1) < page_count; - href = mkurl(ctx.opac_root _ '/results', {page => page + 1}); + href = mkurl(ctx.opac_root _ direct, {page => page + 1}); ELSE; class = class _ ' invisible'; END; %] -[% PROCESS "opac/parts/result/paginate.tt2" %] +[% PROCESS "opac/parts/result/paginate.tt2" browse=isBrowse seeAlso=isSeeAlso%] [% ctx.results_count_header = PROCESS results_count_header; ctx.results_count_header %] @@ -81,7 +81,7 @@ [% IF ctx.is_staff %] diff --git a/KCLS/openils/var/templates_kcls/opac/parts/searchbar_browse.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/searchbar_browse.tt2 new file mode 100644 index 0000000000..1a5ad06320 --- /dev/null +++ b/KCLS/openils/var/templates_kcls/opac/parts/searchbar_browse.tt2 @@ -0,0 +1,48 @@ + [% PROCESS "opac/parts/org_selector.tt2" + sort = CGI.param('sort') || "pubdate.descending"; + %] + + + + + + + + diff --git a/KCLS/openils/var/templates_kcls/opac/parts/topnav.tt2 b/KCLS/openils/var/templates_kcls/opac/parts/topnav.tt2 index 763c57f9ea..3b5c68d638 100644 --- a/KCLS/openils/var/templates_kcls/opac/parts/topnav.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/parts/topnav.tt2 @@ -104,4 +104,3 @@
[% END %] -[% INCLUDE "opac/parts/topnav_links.tt2" %] diff --git a/KCLS/openils/var/templates_kcls/opac/record.tt2 b/KCLS/openils/var/templates_kcls/opac/record.tt2 index 2507a4457d..2542ba6e72 100644 --- a/KCLS/openils/var/templates_kcls/opac/record.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/record.tt2 @@ -2,10 +2,16 @@ WRAPPER "opac/parts/base.tt2"; INCLUDE "opac/parts/topnav.tt2"; ctx.page_title = l("Record Detail") %] -
- [% INCLUDE "opac/parts/printnav.tt2" %] - [% INCLUDE "opac/parts/searchbar.tt2" %] -
+ [% IF ( CGI.param('bterm') ) %] +
+ [% INCLUDE "opac/parts/searchbar_browse.tt2" %] + [% INCLUDE "opac/parts/browse_set_nav.tt2" %] +
+ [% ELSE %] +
+ [% INCLUDE "opac/parts/searchbar.tt2" %] +
+ [% END %]
[% INCLUDE "opac/parts/record/body.tt2" %] diff --git a/KCLS/openils/var/templates_kcls/opac/results.tt2 b/KCLS/openils/var/templates_kcls/opac/results.tt2 index c4fedb106c..5f9df7cf54 100644 --- a/KCLS/openils/var/templates_kcls/opac/results.tt2 +++ b/KCLS/openils/var/templates_kcls/opac/results.tt2 @@ -16,7 +16,6 @@ %]
- [% INCLUDE "opac/parts/printnav.tt2" %] [% INCLUDE "opac/parts/searchbar.tt2" took_care_of_form=1 %]
@@ -31,7 +30,7 @@ onmouseout="this.src='[% ctx.media_prefix %]/images/another_search.png';" />
+
+
+
+ [% IF ctx.mylist.size %] + +
+ [% END %] + + [% INCLUDE "opac/parts/browse_filtersort.tt2" value=CGI.param('sort') submit_on_change=1 %] +
+ + +
+ + + + +
+
+
+
+
+
+
+
+ + [% IF ctx.display_groupings %] + [% path = "opac/parts/result/" _ + ( ctx.records.size ? "table_grouped.tt2" : "lowhits.tt2" ); + INCLUDE $path isSeeAlso='true' %] + [% ELSE %] + [% path = "opac/parts/result/" _ + ( ctx.records.size ? "table.tt2" : "lowhits.tt2" ); + INCLUDE $path isSeeAlso='true' %] + [% END %] +
+
+
+
+
+[% END %] diff --git a/KCLS/sql/browse/001.0815.schema.config-metabib-interauthority.sql b/KCLS/sql/browse/001.0815.schema.config-metabib-interauthority.sql new file mode 100644 index 0000000000..8ec25d20a3 --- /dev/null +++ b/KCLS/sql/browse/001.0815.schema.config-metabib-interauthority.sql @@ -0,0 +1,126 @@ +BEGIN; + +ALTER TABLE authority.control_set_authority_field + ADD COLUMN linking_subfield CHAR(1); + +UPDATE authority.control_set_authority_field + SET linking_subfield = '0' WHERE main_entry IS NOT NULL; + +CREATE TABLE authority.authority_linking ( + id BIGSERIAL PRIMARY KEY, + source BIGINT REFERENCES authority.record_entry (id) NOT NULL, + target BIGINT REFERENCES authority.record_entry (id) NOT NULL, + field INT REFERENCES authority.control_set_authority_field (id) NOT NULL +); + +-- Given an authority record's ID, control set ID (if known), and marc::XML, +-- return all links to other authority records in the form of rows that +-- can be inserted into authority.authority_linking. +CREATE OR REPLACE FUNCTION authority.calculate_authority_linking( + rec_id BIGINT, rec_control_set INT, rec_marc_xml XML +) RETURNS SETOF authority.authority_linking AS $func$ +DECLARE + acsaf authority.control_set_authority_field%ROWTYPE; + link TEXT; + aal authority.authority_linking%ROWTYPE; +BEGIN + IF rec_control_set IS NULL THEN + -- No control_set on record? Guess at one + SELECT control_set INTO rec_control_set + FROM authority.control_set_authority_field + WHERE tag IN ( + SELECT UNNEST( + XPATH('//*[starts-with(@tag,"1")]/@tag',rec_marc_xml::XML)::TEXT[] + ) + ) LIMIT 1; + + IF NOT FOUND THEN + RAISE WARNING 'Could not even guess at control set for authority record %', rec_id; + RETURN; + END IF; + END IF; + + aal.source := rec_id; + + FOR acsaf IN + SELECT * FROM authority.control_set_authority_field + WHERE control_set = rec_control_set + AND linking_subfield IS NOT NULL + AND main_entry IS NOT NULL + LOOP + link := SUBSTRING( + (XPATH('//*[@tag="' || acsaf.tag || '"]/*[@code="' || + acsaf.linking_subfield || '"]/text()', rec_marc_xml))[1]::TEXT, + '\d+$' + ); + + -- Ignore links that are null, malformed, circular, or point to + -- non-existent authority records. + IF link IS NOT NULL AND link::BIGINT <> rec_id THEN + PERFORM * FROM authority.record_entry WHERE id = link::BIGINT; + IF FOUND THEN + aal.target := link::BIGINT; + aal.field := acsaf.id; + RETURN NEXT aal; + END IF; + END IF; + END LOOP; +END; +$func$ LANGUAGE PLPGSQL; + + +-- AFTER UPDATE OR INSERT trigger for authority.record_entry +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete () RETURNS TRIGGER AS $func$ +BEGIN + + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
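+        -- On UPDATE, nothing is re-ingested when the MARC is unchanged, unless the
+        -- 'ingest.reingest.force_on_same_marc' internal flag is enabled; otherwise the
+        -- old simple_heading and authority_linking rows for this record are dropped
+        -- here and rebuilt from the new MARC by the INSERTs below.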
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + SELECT record, atag, value, sort_value FROM authority.simple_heading_set(NEW.marc); + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$func$ LANGUAGE PLPGSQL; + +COMMIT; diff --git a/KCLS/sql/browse/002.0816.schema.bib-auth-browse.sql b/KCLS/sql/browse/002.0816.schema.bib-auth-browse.sql new file mode 100644 index 0000000000..74506a3da7 --- /dev/null +++ b/KCLS/sql/browse/002.0816.schema.bib-auth-browse.sql @@ -0,0 +1,7527 @@ +BEGIN; + +-- Section 1: authority.control_set_authority_field table -- SAFE + +-- To avoid problems with altering a table column after doing an +-- update. +ALTER TABLE authority.control_set_authority_field + DISABLE TRIGGER ALL; + +ALTER TABLE authority.control_set_authority_field + ADD COLUMN display_sf_list TEXT; + +UPDATE authority.control_set_authority_field + SET display_sf_list = REGEXP_REPLACE(sf_list, '[w254]', '', 'g'); + +ALTER TABLE authority.control_set_authority_field + ALTER COLUMN display_sf_list SET NOT NULL; + +ALTER TABLE authority.control_set_authority_field + ENABLE TRIGGER ALL; + +-- Section 2: metabib.browse_entry_def_map and config.metabib_field -- SAFE +-- I think this breaks it because in KCLS, they changed some of the formats to kcls and changed the xpath. 
+ +ALTER TABLE metabib.browse_entry_def_map + ADD COLUMN authority BIGINT REFERENCES authority.record_entry (id) + ON DELETE SET NULL; + +ALTER TABLE config.metabib_field ADD COLUMN authority_xpath TEXT; +ALTER TABLE config.metabib_field ADD COLUMN browse_sort_xpath TEXT; + +UPDATE config.metabib_field + SET authority_xpath = '//@xlink:href' + WHERE + format = 'mods32' AND + field_class IN ('subject','series','title','author') AND + browse_field IS TRUE; + +UPDATE config.metabib_field + SET authority_xpath = '//@xlink:href' + WHERE + format = 'kcls' AND + field_class IN ('subject','series','title','author') AND + browse_field IS TRUE; + +ALTER TYPE metabib.field_entry_template ADD ATTRIBUTE authority BIGINT; +ALTER TYPE metabib.field_entry_template ADD ATTRIBUTE sort_value TEXT; + +-- Section 3: metabib.reingest_metabib_field_entries function + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries( bib_id BIGINT, skip_facet BOOL DEFAULT FALSE, skip_browse BOOL DEFAULT FALSE, skip_search BOOL DEFAULT FALSE ) RETURNS VOID AS $func$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
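+            -- Illustrative example (hypothetical data): if two bibs carry the identical
+            -- heading 'Garcia Marquez, Gabriel', they normalize to the same
+            -- (value, sort_value) pair, so the SELECT below finds a single shared
+            -- metabib.browse_entry row and each bib only contributes its own
+            -- browse_entry_def_map row pointing at it.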
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +-- Section 4: biblio.extract_metabib_field_entry function + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry ( rid BIGINT, default_joiner TEXT ) RETURNS SETOF metabib.field_entry_template AS $func$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + curr_text := ARRAY_TO_STRING( + oils_xpath( '//text()', + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data ise returned from oils_xpath (above) in UTF-8, not entity encoded + REGEXP_REPLACE( -- This escapes embeded [^<]+)(<)([^>]+<)$re$, + E'\\1<\\3', + 'g' + ), + '&(?!amp;)', + '&', + 'g' + ) + ), + ' ' + ); + + CONTINUE WHEN curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g')); + output_row.sort_value := + public.search_normalize(sort_value); + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g')); + + output_row.facet_field = TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')); + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$func$ LANGUAGE PLPGSQL; + +-- Section 6: config.xml_transform + +-- 953.data.MODS32-xsl.sql +UPDATE config.xml_transform SET xslt=$$ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BK + SE + + + BK + MM + CF + MP + VM + MU + + + + + + + + + b + afgk + + + + + abfgk + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + b + b + afgk + + + + + + + + + + + + + + b + afgk + + + + + abfgk + + + + + + <xsl:value-of select="$title"/> + + + + + + + b + b + afgk + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + 
<xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">a</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + a + + + + + + + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <!-- 1/04 removed $h, $b --> + <xsl:with-param name="codes">af</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,$nfi+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + + + + ah + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind1+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + creator + + + + + + + + + + creator + + + + + + + + + + creator + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + personal + + + + + + + + + + + yes + + + yes + + + text + cartographic + notated music + sound recording-nonmusical + sound recording-musical + still image + moving image + three dimensional object + software, multimedia + mixed material + + + + globe + + + remote sensing image + + + + + + map + + + atlas + + + + + + + + database + + + loose-leaf + + + series + + + newspaper + + + periodical + + + web site + + + + + + + + abstract or summary + + + bibliography + + + catalog + + + dictionary + + + encyclopedia + + + handbook + + + legal article + + + index + + + discography + + + legislation + + + theses + + + survey of literature + + + review + + + programmed text + + + filmography + + + directory + + + statistics + + + technical report + + + legal case and case notes + + + law report or digest + + + treaty + + + + + + conference publication + + + + + + + + numeric data + + + database + + + font + + + game + + + + + + patent + + + festschrift + + + + biography + + + + + essay + + + drama + + + comic strip + + + fiction + + + humor, satire + + + letter + + + novel + + + short story + + + speech + + + + + + + biography + + + conference publication + + + drama + + + essay + + + fiction + + + folktale + + + history + + + humor, satire + + + memoir + + + poetry + + + rehearsal + + + reporting + + + sound + + + speech + + + + + + + art original + + + kit + + + art reproduction + + + diorama + + + filmstrip + + + legal article + + + picture + + + graphic + + + technical drawing + + + motion picture + + + chart + + + flash card + + + microscope slide + + + model + + + realia + + + slide + + + transparency + + + videorecording + + + toy + + + + + + + + + + abvxyz + - + + + + + + + + + code + marccountry + + + + + + + + code + iso3166 + + + + + + + + text + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + :,;/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + monographic + continuing + + + + + + + ab + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + reformatted digital + + + digitized microfilm + + + 
digitized other analog + + + + + + + + + + + + + + + + braille + + +
print +
+ +
electronic +
+ +
microfiche +
+ +
microfilm +
+
+ + +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + +
+ + + + access + + + preservation + + + replacement + + + + + +
chip cartridge +
+ +
computer optical disc cartridge +
+ +
magnetic disc +
+ +
magneto-optical disc +
+ +
optical disc +
+ +
remote +
+ +
tape cartridge +
+ +
tape cassette +
+ +
tape reel +
+ + +
celestial globe +
+ +
earth moon globe +
+ +
planetary or lunar globe +
+ +
terrestrial globe +
+ + +
kit +
+ + +
atlas +
+ +
diagram +
+ +
map +
+ +
model +
+ +
profile +
+ +
remote-sensing image +
+ +
section +
+ +
view +
+ + +
aperture card +
+ +
microfiche +
+ +
microfiche cassette +
+ +
microfilm cartridge +
+ +
microfilm cassette +
+ +
microfilm reel +
+ +
microopaque +
+ + +
film cartridge +
+ +
film cassette +
+ +
film reel +
+ + +
chart +
+ +
collage +
+ +
drawing +
+ +
flash card +
+ +
painting +
+ +
photomechanical print +
+ +
photonegative +
+ +
photoprint +
+ +
picture +
+ +
print +
+ +
technical drawing +
+ + +
notated music +
+ + +
filmslip +
+ +
filmstrip cartridge +
+ +
filmstrip roll +
+ +
other filmstrip type +
+ +
slide +
+ +
transparency +
+ +
remote-sensing image +
+ +
cylinder +
+ +
roll +
+ +
sound cartridge +
+ +
sound cassette +
+ +
sound disc +
+ +
sound-tape reel +
+ +
sound-track film +
+ +
wire recording +
+ + +
braille +
+ +
combination +
+ +
moon +
+ +
tactile, with no writing system +
+ + +
braille +
+ +
large print +
+ +
regular print +
+ +
text in looseleaf binder +
+ + +
videocartridge +
+ +
videocassette +
+ +
videodisc +
+ +
videoreel +
+ + + + + + + + + + abce + + + +
+ + + + + + + + + + ab + + + + + + + + agrt + + + + + + + ab + + + + + + + + + adolescent + + + adult + + + general + + + juvenile + + + preschool + + + specialized + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + defg + + + + + + + + + + + + marcgac + + + + + + iso3166 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ab + + + + + + + abx + + + + + + + ab + + + + + + + + + + + + + + + + + + + + + + + + + + + + ab + + + + + + + + + + av + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">av</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + abcx3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aq + t + g + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">dg</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + c + t + dgn + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aqdc + t + gn + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">adfgklmorsv</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind1+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param 
name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + aq + t + g + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">dg</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + c + t + dgn + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aqdc + t + gn + + + + + + + + + + + + + + adfgklmorsv + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + + + isbn + + + + + + + + + + isrc + + + + + + + + + + ismn + + + + + + + + + + sici + + + + ab + + + + + + issn + + + + + + + + lccn + + + + + + + + + + issue number + matrix number + music plate + music publisher + videorecording identifier + + + + + + + ba + ab + + + + + + + + + + ab + + + + + + + + doi + hdl + uri + + + + + + + + + + + + + + + + + y3z + + + + + + + + + + + + + + + + + + + + + y3 + + + + + + + z + + + + + + + + + + + + + + + + + + abje + + + + + + + + abcd35 + + + + + + + abcde35 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + n + n + fgkdlmor + + + + + p + p + fgkdlmor + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + g + g + pst + + + + + p + p + fgkdlmor + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + cdn + + + + + + + + + + aq + + + + :,;/ + + + + + + + + + + acdeq + + + + + + constituent + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."></xsl:value-of> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."></xsl:value-of> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."></xsl:value-of> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."></xsl:value-of> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + code + marcgac + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + lcsh + lcshac + mesh + + nal + csh + rvm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + aq + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + cdnp + + + + + + + + + + + + + + + + abcdeqnp + + + + + + + + + + + + + + + + + + + + + adfhklor + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind1+1)"/> + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + + abcd + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + bc + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + Arabic + Latin + Chinese, Japanese, Korean + Cyrillic + Hebrew + Greek + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + summary or subtitle + sung or spoken text + libretto + table of contents + accompanying material + translation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + summary or subtitle + sung or spoken text + libretto + table of contents + accompanying material + translation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .:,;/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
$$ WHERE name = 'mods32'; + +-- Section 7: config.xml_transform + +-- 954.data.MODS33-xsl.sql +UPDATE config.xml_transform SET xslt=$$ + + + + !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ + + + + ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ + + + + !'()*-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz~ + + + 0123456789ABCDEF + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BK + SE + + + BK + MM + CF + MP + VM + MU + + + + + + + + + b + afgk + + + + + abfgk + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + b + b + afgk + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">a</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <!-- 1/04 removed $h, b --> + <xsl:with-param name="codes">a</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <!-- 1/04 removed $h, $b --> + <xsl:with-param name="codes">af</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + <xsl:call-template name="uri"/> + + <xsl:variable name="str"> + <xsl:for-each select="marc:subfield"> + <xsl:if + test="(contains('adfklmors',@code) and (not(../marc:subfield[@code='n' or @code='p']) or (following-sibling::marc:subfield[@code='n' or @code='p'])))"> + <xsl:value-of select="text()"/> + <xsl:text> </xsl:text> + </xsl:if> + </xsl:for-each> + </xsl:variable> + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="substring($str,1,string-length($str)-1)"/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">ah</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + creator + + + + + + + + + + + + creator + + + + + + + + + + + + creator + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + personal + + + + + + + + + + + yes + + + yes + + + text + cartographic + notated music + sound recording-nonmusical + sound recording-musical + still image + moving image + three dimensional object + software, multimedia + mixed material + + + + globe + + + remote-sensing image + + + + + + map + + + atlas + + + + + + + + database + + + loose-leaf + + + series + + + newspaper + + + periodical + + + web site + + + + + + + + abstract or summary + + + bibliography + + + catalog + + + dictionary + + + encyclopedia + + + handbook + + + legal article + + + index + + + discography + + + legislation + + + theses + + + survey of literature + + + review + + + programmed text + + + filmography + + + directory + + + statistics + + + technical report + + + legal case and case notes + + + law report or digest + + + treaty + + + + + + conference publication + + + + + + + + numeric data + + + database + + + 
font + + + game + + + + + + patent + + + offprint + + + festschrift + + + + biography + + + + + essay + + + drama + + + comic strip + + + fiction + + + humor, satire + + + letter + + + novel + + + short story + + + speech + + + + + + + biography + + + conference publication + + + drama + + + essay + + + fiction + + + folktale + + + history + + + humor, satire + + + memoir + + + poetry + + + rehearsal + + + reporting + + + sound + + + speech + + + + + + + art original + + + kit + + + art reproduction + + + diorama + + + filmstrip + + + legal article + + + picture + + + graphic + + + technical drawing + + + motion picture + + + chart + + + flash card + + + microscope slide + + + model + + + realia + + + slide + + + transparency + + + videorecording + + + toy + + + + + + + + + + + + + abcdef + - + + + + + + + + + + abvxyz + - + + + + + + + + + code + marccountry + + + + + + + + code + iso3166 + + + + + + + + text + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + :,;/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + monographic + continuing + + + + + + + ab + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + reformatted digital + + + digitized microfilm + + + digitized other analog + + + + + + + + + + + + + + + +
braille +
+ +
print +
+ +
electronic +
+ +
microfiche +
+ +
microfilm +
+
+ + +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + +
+ + + + access + + + preservation + + + replacement + + + + + +
chip cartridge +
+ +
computer optical disc cartridge +
+ +
magnetic disc +
+ +
magneto-optical disc +
+ +
optical disc +
+ +
remote +
+ +
tape cartridge +
+ +
tape cassette +
+ +
tape reel +
+ + +
celestial globe +
+ +
earth moon globe +
+ +
planetary or lunar globe +
+ +
terrestrial globe +
+ + +
kit +
+ + +
atlas +
+ +
diagram +
+ +
map +
+ +
model +
+ +
profile +
+ +
remote-sensing image +
+ +
section +
+ +
view +
+ + +
aperture card +
+ +
microfiche +
+ +
microfiche cassette +
+ +
microfilm cartridge +
+ +
microfilm cassette +
+ +
microfilm reel +
+ +
microopaque +
+ + +
film cartridge +
+ +
film cassette +
+ +
film reel +
+ + +
chart +
+ +
collage +
+ +
drawing +
+ +
flash card +
+ +
painting +
+ +
photomechanical print +
+ +
photonegative +
+ +
photoprint +
+ +
picture +
+ +
print +
+ +
technical drawing +
+ + +
notated music +
+ + +
filmslip +
+ +
filmstrip cartridge +
+ +
filmstrip roll +
+ +
other filmstrip type +
+ +
slide +
+ +
transparency +
+ +
remote-sensing image +
+ +
cylinder +
+ +
roll +
+ +
sound cartridge +
+ +
sound cassette +
+ +
sound disc +
+ +
sound-tape reel +
+ +
sound-track film +
+ +
wire recording +
+ + +
braille +
+ +
combination +
+ +
moon +
+ +
tactile, with no writing system +
+ + +
braille +
+ +
large print +
+ +
regular print +
+ +
text in looseleaf binder +
+ + +
videocartridge +
+ +
videocassette +
+ +
videodisc +
+ +
videoreel +
+ + + + + + + + + + abce + + + +
+ + + + + + + + + + ab + + + + + + + + agrt + + + + + + + ab + + + + + + + + + adolescent + + + adult + + + general + + + juvenile + + + preschool + + + specialized + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + defg + + + + + + + + + + + + marcgac + + + + + + iso3166 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ab + + + + + + + abx + + + + + + + ab + + + + + + + + + + + + + + + + + + + + + + + + + + + + ab + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">av</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">av</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + abcx3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aq + t + g + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">dg</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + c + t + dgn + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aqdc + t + gn + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">adfgklmorsv</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="marc:subfield[@code='a']"/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param 
name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + aq + t + g + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">dg</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + c + t + dgn + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aqdc + t + gn + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">adfgklmorsv</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + isbn + + + + + + + + + + isrc + + + + + + + + + + ismn + + + + + + + + + + sici + + + + ab + + + + + + + issn + + + + + + + + issn-l + + + + + + + + + + + + lccn + + + + + + + + + + issue number + matrix number + music plate + music publisher + videorecording identifier + + + + + + + + ba + ab + + + + + + + + + + + ab + + + + + + + + doi + hdl + uri + + + + + + + + + + + + + + + + + y3z + + + + + + + + + + + + + + + + + + + + + + + + + y3 + + + + + + + z + + + + + + + + + + + + + + + + y3 + + + + + + + z + + + + + + + + + + + + + + + + + + abe + + + + + + + + + u + + + + + + + + hijklmt + + + + + + + + + + abcd35 + + + + + + + abcde35 + + + + + + + + + + aacr2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + n + n + fgkdlmor + + + + + p + p + fgkdlmor + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + g + g + pst + + + + + p + p + fgkdlmor + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + cdn + + + + + + + + + + aq + + + + :,;/ + + + + + + + + + + acdeq + + + + + + constituent + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + code + marcgac + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + lcsh + lcshac + mesh + + nal + csh + rvm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + aq + + + + + + + + + + + + + + + + + + + + + + + + + + + + + cdnp + + + + + + + + + + + + + + + abcdeqnp + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">adfhklor</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + + + abcd + + + + + + + + + + + + + + + + abcd + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + bc + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Arabic + Latin + Chinese, Japanese, Korean + Cyrillic + Hebrew + Greek + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + summary or subtitle + sung or spoken text + libretto + table of contents + accompanying material + translation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + summary or subtitle + sung or spoken text + libretto + table of contents + accompanying material + translation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + abcdefghijklmnopqrstuvwxyz + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .:,;/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .:,;/] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Warning: string contains a character + that is out of range! Substituting "?". + 63 + + + + + + + + + + + + + + + + +
$$ WHERE name = 'mods33'; + +-- Section 8: config.global_flag + +INSERT INTO config.global_flag (name, value, enabled, label) VALUES +( + 'opac.browse.warnable_regexp_per_class', + '{"title": "^(a|the|an)\\s"}', + FALSE, + oils_i18n_gettext( + 'opac.browse.warnable_regexp_per_class', + 'Map of search classes to regular expressions to warn user about leading articles.', + 'cgf', + 'label' + ) +), +( + 'opac.browse.holdings_visibility_test_limit', + '100', + TRUE, + oils_i18n_gettext( + 'opac.browse.holdings_visibility_test_limit', + 'Don''t look for more than this number of records with holdings when displaying browse headings with visible record counts.', + 'cgf', + 'label' + ) +); + +-- Section 10: metabib.browse_entry and metabib.browse_entry_def_map + +ALTER TABLE metabib.browse_entry DROP CONSTRAINT browse_entry_value_key; +ALTER TABLE metabib.browse_entry ADD COLUMN sort_value TEXT; +DELETE FROM metabib.browse_entry_def_map; -- Yeah. +DELETE FROM metabib.browse_entry WHERE sort_value IS NULL; +ALTER TABLE metabib.browse_entry ALTER COLUMN sort_value SET NOT NULL; +ALTER TABLE metabib.browse_entry ADD UNIQUE (sort_value, value); +DROP TRIGGER IF EXISTS mbe_sort_value ON metabib.browse_entry; + +CREATE INDEX browse_entry_sort_value_idx + ON metabib.browse_entry USING BTREE (sort_value); + +-- NOTE If I understand ordered indices correctly, an index on sort_value DESC +-- is not actually needed, even though we do have a query that does ORDER BY +-- on this column in that direction. The previous index serves for both +-- directions, and ordering in an index is only helpful for multi-column +-- indices, I think. See http://www.postgresql.org/docs/9.1/static/indexes-ordering.html + +-- CREATE INDEX CONCURRENTLY browse_entry_sort_value_idx_desc +-- ON metabib.browse_entry USING BTREE (sort_value DESC); + +-- Section 11: metabib.flat_browse_entry_appearance + +CREATE TYPE metabib.flat_browse_entry_appearance AS ( + browse_entry BIGINT, + value TEXT, + fields TEXT, + authorities TEXT, + sources INT, -- visible ones, that is + row_number INT, -- internal use, sort of + accurate BOOL, -- Count in sources field is accurate? Not + -- if we had more than a browse superpage + -- of records to look at. 
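+    -- pivot_point: populated only on the placeholder row emitted once result_limit
+    -- is exhausted; it holds the browse_entry id to pass back in as pivot_id when
+    -- fetching the adjacent page.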
+ pivot_point BIGINT +); + +-- Section 12: metabib.browse_pivot function + +CREATE OR REPLACE FUNCTION metabib.browse_pivot( + search_field INT[], + browse_term TEXT +) RETURNS BIGINT AS $p$ +DECLARE + id BIGINT; +BEGIN + SELECT INTO id mbe.id FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_def_map mbedm ON ( + mbedm.entry = mbe.id AND + mbedm.def = ANY(search_field) + ) + WHERE mbe.sort_value >= public.search_normalize(browse_term) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; + + RETURN id; +END; +$p$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION metabib.staged_browse( + query TEXT, + fields INT[], + context_org INT, + context_locations INT[], + staff BOOL, + browse_superpage_size INT, + count_up_from_zero BOOL, -- if false, count down from -1 + result_limit INT, + next_pivot_pos INT +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now + SELECT INTO all_records, result_row.authorities, result_row.fields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT def), $$,$$) + FROM metabib.browse_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + result_row.sources := 0; + + full_end := ARRAY_LENGTH(all_records, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_records[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + IF result_row.sources > 0 THEN + -- We've got a browse entry with visible holdings. Yay. + + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. 
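+            -- (Illustration: when called for the backward half of a default
+            -- 10-entry page, result_limit is 5 and next_pivot_pos is 10, so the
+            -- first 5 visible entries come back as real result rows, the next 5
+            -- only advance the counters, and the entry after those is returned
+            -- solely as a pivot_point for the page beyond.)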
+ + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.accurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. + row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$p$ LANGUAGE PLPGSQL; + +-- Section 13: metabib.browse function + +CREATE OR REPLACE FUNCTION metabib.browse( + search_field INT[], + browse_term TEXT, + context_org INT DEFAULT NULL, + context_loc_group INT DEFAULT NULL, + staff BOOL DEFAULT FALSE, + pivot_id BIGINT DEFAULT NULL, + result_limit INT DEFAULT 10 +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + pivot_id := metabib.browse_pivot(search_field, browse_term); + END IF; + + SELECT INTO pivot_sort_value, pivot_sort_fallback + sort_value, value FROM metabib.browse_entry WHERE id = pivot_id; + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. 
+ core_query := ' + SELECT + mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_entry mbe + WHERE EXISTS (SELECT 1 FROM metabib.browse_entry_def_map mbedm WHERE + mbedm.entry = mbe.id AND + mbedm.def = ANY(' || quote_literal(search_field) || ') + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.sort_value <= ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.sort_value > ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot + ) ORDER BY row_number DESC; + +END; +$p$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION metabib.browse( + search_class TEXT, + browse_term TEXT, + context_org INT DEFAULT NULL, + context_loc_group INT DEFAULT NULL, + staff BOOL DEFAULT FALSE, + pivot_id BIGINT DEFAULT NULL, + result_limit INT DEFAULT 10 +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +BEGIN + RETURN QUERY SELECT * FROM metabib.browse( + (SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class), + browse_term, + context_org, + context_loc_group, + staff, + pivot_id, + result_limit + ); +END; +$p$ LANGUAGE PLPGSQL; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:relatedItem[@type="series"]/mods32:titleInfo[@type="nfi"]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'series' AND name = 'seriestitle' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and not (@type)]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL, + browse_field = TRUE +WHERE + field_class = 'title' AND name = 'proper' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='alternative-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'alternative' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='uniform-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'uniform' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='translated-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'translated' AND format = 'mods32'; + +-- This keeps extra terms like "creator" out of browse headings. 
+UPDATE config.metabib_field + SET browse_xpath = $$//*[local-name()='namePart']$$ -- vim */ + WHERE + browse_field AND + browse_xpath IS NULL AND + field_class = 'author'; + +-- Section 14: config.org_unit_setting_type + +INSERT INTO config.org_unit_setting_type ( + name, label, grp, description, datatype +) VALUES ( + 'opac.browse.pager_shortcuts', + 'Paging shortcut links for OPAC Browse', + 'opac', + 'The characters in this string, in order, will be used as shortcut links for quick paging in the OPAC browse interface. Any sequence surrounded by asterisks will be taken as a whole label, not split into individual labels at the character level, but only the first character will serve as the basis of the search.', + 'string' +); + +COMMIT; + +--SELECT metabib.reingest_metabib_field_entries(id, TRUE, FALSE, TRUE) +-- FROM biblio.record_entry; diff --git a/KCLS/sql/browse/003.0821.function.browse-normalize-timing.sql b/KCLS/sql/browse/003.0821.function.browse-normalize-timing.sql new file mode 100644 index 0000000000..1c9591aa83 --- /dev/null +++ b/KCLS/sql/browse/003.0821.function.browse-normalize-timing.sql @@ -0,0 +1,95 @@ +BEGIN; + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries( bib_id BIGINT, skip_facet BOOL DEFAULT FALSE, skip_browse BOOL DEFAULT FALSE, skip_search BOOL DEFAULT FALSE ) RETURNS VOID AS $func$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + mbe_txt TEXT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
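+            -- Normalize the heading, reuse an existing browse_entry row with the
+            -- same value if there is one, and otherwise insert a new row and read
+            -- its id back from the sequence.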
+ mbe_txt := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry WHERE value = mbe_txt; + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry (value) VALUES (mbe_txt); + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source) + VALUES (mbe_id, ind_data.field, ind_data.source); + END IF; + + -- Avoid inserting duplicate rows, but retain granularity of being + -- able to search browse fields with "starts with" type operators + -- (for example, for titles of songs in music albums) + IF (ind_data.search_field OR ind_data.browse_field) AND NOT b_skip_search THEN + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +COMMIT; + diff --git a/KCLS/sql/browse/004.0837.schema.browse-auth-linking.plus-joiner.sql b/KCLS/sql/browse/004.0837.schema.browse-auth-linking.plus-joiner.sql new file mode 100644 index 0000000000..278a5fddc6 --- /dev/null +++ b/KCLS/sql/browse/004.0837.schema.browse-auth-linking.plus-joiner.sql @@ -0,0 +1,985 @@ +-- Evergreen DB patch 0837.schema.browse-auth-linking.plus-joiner.sql +-- +-- In this upgrade script we complete inter-subfield joiner support, so that +-- subject components can be separated by " -- ", for instance. That's the +-- easy part. +-- +-- We also add the ability to browse by in-use authority main entries and find +-- bibs that use unauthorized versions of the authority's value, by string matching. +-- +BEGIN; + +ALTER TABLE config.metabib_field ADD COLUMN joiner TEXT; +UPDATE config.metabib_field SET joiner = ' -- ' WHERE field_class = 'subject' AND name NOT IN ('name', 'complete'); + +-- To avoid problems with altering a table column after doing an +-- update. 
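+-- (DISABLE TRIGGER ALL below keeps this table's triggers from firing while the
+-- new joiner column is added and backfilled; they are switched back on right
+-- after the UPDATE.)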
+ALTER TABLE authority.control_set_authority_field DISABLE TRIGGER ALL; + +ALTER TABLE authority.control_set_authority_field ADD COLUMN joiner TEXT; +UPDATE authority.control_set_authority_field SET joiner = ' -- ' WHERE tag LIKE ANY (ARRAY['_4_','_5_','_8_']); + +ALTER TABLE authority.control_set_authority_field ENABLE TRIGGER ALL; + +-- Seed data will be generated from class <-> axis mapping +CREATE TABLE authority.control_set_bib_field_metabib_field_map ( + id SERIAL PRIMARY KEY, + bib_field INT NOT NULL REFERENCES authority.control_set_bib_field (id) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + metabib_field INT NOT NULL REFERENCES config.metabib_field (id) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + CONSTRAINT a_bf_mf_map_once UNIQUE (bib_field, metabib_field) +); + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_main AS + SELECT DISTINCT b.authority_field, m.metabib_field + FROM authority.control_set_bib_field_metabib_field_map m JOIN authority.control_set_bib_field b ON (b.id = m.bib_field); +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_main IS $$metabib fields for main entry auth fields$$; + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_refs_only AS + SELECT DISTINCT a.id AS authority_field, m.metabib_field + FROM authority.control_set_authority_field a + JOIN authority.control_set_authority_field ame ON (a.main_entry = ame.id) + JOIN authority.control_set_bib_field b ON (b.authority_field = ame.id) + JOIN authority.control_set_bib_field_metabib_field_map mf ON (mf.bib_field = b.id) + JOIN authority.control_set_auth_field_metabib_field_map_main m ON (ame.id = m.authority_field); +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_refs_only IS $$metabib fields for NON-main entry auth fields$$; + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_refs AS + SELECT * FROM authority.control_set_auth_field_metabib_field_map_main + UNION + SELECT * FROM authority.control_set_auth_field_metabib_field_map_refs_only; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_refs IS $$metabib fields for all auth fields$$; + + +-- blind refs only is probably what we want for lookup in bib/auth browse +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_refs_only AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_refs_only r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_refs_only IS $$metabib fields for NON-main entry auth fields that can't be linked to other records$$; -- ' + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_refs AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_refs r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_refs IS $$metabib fields for all auth fields that can't be linked to other records$$; -- ' + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_main AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_main r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_main IS $$metabib fields for main entry 
auth fields that can't be linked to other records$$; -- ' + +CREATE OR REPLACE FUNCTION authority.normalize_heading( marcxml TEXT, no_thesaurus BOOL ) RETURNS TEXT AS $func$ +DECLARE + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + sf_node TEXT; + tag_node TEXT; + thes_code TEXT; + cset INT; + heading_text TEXT; + tmp_text TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + thes_code := vandelay.marc21_extract_fixed_field(marcxml,'Subj'); + IF thes_code IS NULL THEN + thes_code := '|'; + ELSIF thes_code = 'z' THEN + thes_code := COALESCE( oils_xpath_string('//*[@tag="040"]/*[@code="f"][1]', marcxml), '' ); + END IF; + + heading_text := ''; + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset AND main_entry IS NULL LOOP + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + first_sf := TRUE; + + FOR tag_node IN SELECT unnest(oils_xpath('//*[@tag="'||tag_used||'"]',marcxml)) LOOP + FOR sf_node IN SELECT unnest(oils_xpath('./*[contains("'||acsaf.sf_list||'",@code)]',tag_node)) LOOP + + tmp_text := oils_xpath_string('.', sf_node); + sf := oils_xpath_string('./@code', sf_node); + + IF first_sf AND tmp_text IS NOT NULL AND nfi_used IS NOT NULL THEN + + tmp_text := SUBSTRING( + tmp_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tag_node), + $$\D+$$, + '', + 'g' + ), + '' + )::INT, + 0 + ) + 1 + ); + + END IF; + + first_sf := FALSE; + + IF tmp_text IS NOT NULL AND tmp_text <> '' THEN + heading_text := heading_text || E'\u2021' || sf || ' ' || tmp_text; + END IF; + END LOOP; + + EXIT WHEN heading_text <> ''; + END LOOP; + + EXIT WHEN heading_text <> ''; + END LOOP; + + IF heading_text <> '' THEN + IF no_thesaurus IS TRUE THEN + heading_text := tag_used || ' ' || public.naco_normalize(heading_text); + ELSE + heading_text := tag_used || '_' || COALESCE(nfi_used,'-') || '_' || thes_code || ' ' || public.naco_normalize(heading_text); + END IF; + ELSE + heading_text := 'NOHEADING_' || thes_code || ' ' || MD5(marcxml); + END IF; + + RETURN heading_text; +END; +$func$ LANGUAGE PLPGSQL IMMUTABLE; + +CREATE OR REPLACE FUNCTION authority.simple_heading_set( marcxml TEXT ) RETURNS SETOF authority.simple_heading AS $func$ +DECLARE + res authority.simple_heading%ROWTYPE; + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + cset INT; + heading_text TEXT; + joiner_text TEXT; + sort_text TEXT; + tmp_text TEXT; + tmp_xml TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + res.record := auth_id; + + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset LOOP + + res.atag := 
acsaf.id; + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + joiner_text := COALESCE(acsaf.joiner, ' '); + + FOR tmp_xml IN SELECT UNNEST(XPATH('//*[@tag="'||tag_used||'"]', marcxml::XML)) LOOP + + heading_text := COALESCE( + oils_xpath_string('./*[contains("'||acsaf.sf_list||'",@code)]', tmp_xml::TEXT, joiner_text), + '' + ); + + IF nfi_used IS NOT NULL THEN + + sort_text := SUBSTRING( + heading_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tmp_xml::TEXT), + $$\D+$$, + '', + 'g' + ), + '' + )::INT, + 0 + ) + 1 + ); + + ELSE + sort_text := heading_text; + END IF; + + IF heading_text IS NOT NULL AND heading_text <> '' THEN + res.value := heading_text; + res.sort_value := public.naco_normalize(sort_text); + res.index_vector = to_tsvector('keyword'::regconfig, res.sort_value); + RETURN NEXT res; + END IF; + + END LOOP; + + END LOOP; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL IMMUTABLE; + +CREATE TABLE metabib.browse_entry_simple_heading_map ( + id BIGSERIAL PRIMARY KEY, + entry BIGINT REFERENCES metabib.browse_entry (id), + simple_heading BIGINT REFERENCES authority.simple_heading (id) ON DELETE CASCADE +); +CREATE INDEX browse_entry_sh_map_entry_idx ON metabib.browse_entry_simple_heading_map (entry); +CREATE INDEX browse_entry_sh_map_sh_idx ON metabib.browse_entry_simple_heading_map (simple_heading); + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry ( rid BIGINT, default_joiner TEXT ) RETURNS SETOF metabib.field_entry_template AS $func$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data is returned from oils_xpath (above) in UTF-8, not entity encoded + REGEXP_REPLACE( -- This escapes embedded <s + xml_node, + $re$(>[^<]+)(<)([^>]+<)$re$, + E'\\1&lt;\\3', + 'g' + ), + '&(?!amp;)', + '&amp;', + 'g' + ), + E'\\s+', + ' ', + 'g' + ) + ), ' '), ''), + joiner + ); + + CONTINUE WHEN curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g')); + output_row.sort_value := + public.naco_normalize(sort_value); + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g')); + + output_row.facet_field = TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')); + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$func$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE + FUNCTION metabib.autosuggest_prepare_tsquery(orig TEXT) RETURNS TEXT[] AS +$$ +DECLARE + orig_ended_in_space BOOLEAN; + result RECORD; + plain TEXT; + normalized TEXT; +BEGIN + orig_ended_in_space := orig ~ E'\\s$'; + + orig := ARRAY_TO_STRING( + evergreen.regexp_split_to_array(orig, E'\\W+'), ' ' + ); + + normalized := public.naco_normalize(orig); -- also trim()s + plain := trim(orig); + + IF NOT orig_ended_in_space THEN + plain := plain || ':*'; + normalized := normalized || ':*'; + END IF; + + plain := ARRAY_TO_STRING(
evergreen.regexp_split_to_array(plain, E'\\s+'), ' & ' + ); + normalized := ARRAY_TO_STRING( + evergreen.regexp_split_to_array(normalized, E'\\s+'), ' & ' + ); + + RETURN ARRAY[normalized, plain]; +END; +$$ LANGUAGE PLPGSQL; + +ALTER TYPE metabib.flat_browse_entry_appearance ADD ATTRIBUTE sees TEXT; +ALTER TYPE metabib.flat_browse_entry_appearance ADD ATTRIBUTE asources INT; +ALTER TYPE metabib.flat_browse_entry_appearance ADD ATTRIBUTE aaccurate TEXT; + +CREATE OR REPLACE FUNCTION metabib.browse_bib_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$p$ LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION metabib.browse_authority_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$p$ LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION metabib.browse_authority_refs_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$p$ LANGUAGE SQL; + +-- The drop is necessary because the language change from PLPGSQL to SQL +-- carries with it name changes to the parameters +DROP FUNCTION metabib.browse_pivot(INT[], TEXT); +CREATE FUNCTION metabib.browse_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT id FROM metabib.browse_entry + WHERE id IN ( + metabib.browse_bib_pivot($1, $2), + metabib.browse_authority_refs_pivot($1,$2) -- only look in 4xx, 5xx, 7xx of authority + ) + ORDER BY sort_value, value LIMIT 1; +$p$ LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION metabib.staged_browse( + query TEXT, + fields INT[], + context_org INT, + context_locations INT[], + staff BOOL, + browse_superpage_size INT, + count_up_from_zero BOOL, -- if false, count down from -1 + result_limit INT, + next_pivot_pos INT +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + + -- Gather aggregate data based on the MBE row 
we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. 
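+            -- (The row search.query_parser_fts() returns with a NULL rel is its
+            -- summary row; its "visible" column carries the count of records that
+            -- passed the visibility test, which is what we store here.)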
+ + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. + row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$p$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION metabib.browse( + search_field INT[], + browse_term TEXT, + context_org INT DEFAULT NULL, + context_loc_group INT DEFAULT NULL, + staff BOOL DEFAULT FALSE, + pivot_id BIGINT DEFAULT NULL, + result_limit INT DEFAULT 10 +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + pivot_id := metabib.browse_pivot(search_field, browse_term); + END IF; + + SELECT INTO pivot_sort_value, pivot_sort_fallback + sort_value, value FROM metabib.browse_entry WHERE id = pivot_id; + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. 
At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.sort_value <= ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.sort_value > ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. 
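+    -- (Worked example: with the default result_limit of 10, back_limit is 5,
+    -- back_to_pivot is 10, forward_limit is 5 and forward_to_pivot is 9, so the
+    -- caller gets roughly half a page of entries on each side of the pivot plus
+    -- pivot_point rows about a full page away for paging in either direction.)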
+ RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot + ) ORDER BY row_number DESC; + +END; +$p$ LANGUAGE PLPGSQL; + +-- No 4XX inter-authority linking +UPDATE authority.control_set_authority_field SET linking_subfield = NULL; +UPDATE authority.control_set_authority_field SET linking_subfield = '0' WHERE tag LIKE ANY (ARRAY['5%','7%']); + +-- Map between authority controlled bib fields and stock indexing metabib fields +INSERT INTO authority.control_set_bib_field_metabib_field_map (bib_field, metabib_field) + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '100' AND m.name = 'personal' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '110' AND m.name = 'corporate' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '111' AND m.name = 'conference' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '130' AND m.name = 'uniform' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '148' AND m.name = 'temporal' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '150' AND m.name = 'topic' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '151' AND m.name = 'geographic' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '155' AND m.name = 'genre' -- Just in case... +; + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete () RETURNS TRIGGER AS $func$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; +BEGIN + + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? 
+ + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( ashs.value, ashs.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$func$ LANGUAGE PLPGSQL; + +COMMIT; + diff --git a/KCLS/sql/browse/005.0839.data.alternative-title-indexing.sql b/KCLS/sql/browse/005.0839.data.alternative-title-indexing.sql new file mode 100644 index 0000000000..5cc4061b0d --- /dev/null +++ b/KCLS/sql/browse/005.0839.data.alternative-title-indexing.sql @@ -0,0 +1,100 @@ +BEGIN; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and starts-with(@type,'alternative')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'alternative' AND format = 'mods32'; + +COMMIT; + +-- The following function only appears in the upgrade script and not the +-- baseline schema because it's not necessary in the latter (and it's a +-- temporary function). It just serves to do a hopefully cheaper, more +-- focused reingest just to hit the alternative title index. + +-- This cribs from the guts of metabib.reingest_metabib_field_entries(), +-- and if it actually is a timesaver over a full reingest, then at some +-- point in the future it would be nice if we broke it out into a separate +-- function to make things like this easier. 
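+-- (Being created in pg_temp, the function lives only for the current session, so
+-- the upgrade leaves nothing behind to drop.)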
+ +CREATE OR REPLACE FUNCTION pg_temp.alternative_title_reingest( bib_id BIGINT ) RETURNS VOID AS $func$ +DECLARE + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL := false; + b_skip_browse BOOL := false; + b_skip_search BOOL := false; + alt_title INT; + value_prepped TEXT; +BEGIN + SELECT INTO alt_title id FROM config.metabib_field WHERE field_class = 'title' AND name = 'alternative'; + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) WHERE field = alt_title LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + -- Avoid inserting duplicate rows, but retain granularity of being + -- able to search browse fields with "starts with" type operators + -- (for example, for titles of songs in music albums) + IF (ind_data.search_field OR ind_data.browse_field) AND NOT b_skip_search THEN + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +--SELECT pg_temp.alternative_title_reingest(id) FROM biblio.record_entry WHERE NOT deleted; diff --git a/KCLS/sql/browse/006.0844.data.better_mods_for_browse_etc.sql b/KCLS/sql/browse/006.0844.data.better_mods_for_browse_etc.sql new file mode 100644 index 0000000000..e3a018210f --- /dev/null +++ b/KCLS/sql/browse/006.0844.data.better_mods_for_browse_etc.sql @@ -0,0 +1,3620 @@ +BEGIN; + +-- 953.data.MODS32-xsl.sql +UPDATE config.xml_transform SET xslt=$$ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BK + SE + + + BK + MM + CF + MP + VM + MU + + + + + + + + + b + afgk + + + + + abfgk + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + b + b + afgk + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">abfgk</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + abfgk + + + + + + + + + + + <xsl:value-of select="substring($titleBrowseChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleBrowseChop"/> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">a</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + a + + + + + + + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <!-- 1/04 removed $h, $b --> + <xsl:with-param name="codes">af</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,$nfi+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + + + + ah + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind1+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + creator + + + + + + + + + + creator + + + + + + + + + + creator + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + personal + + + + + + + + + + + yes + + + yes + + + text + cartographic + notated music + sound recording-nonmusical + sound recording-musical + still image + moving image + three dimensional object + software, multimedia + mixed material + + + + globe + + + remote sensing image + + + 
+<!-- [XSLT body omitted: the stylesheet markup is garbled in this copy of the diff,
+     leaving only stray text nodes. It is the MODS 3.2 transform plus a new
+     titleBrowse element (title proper with non-filing characters and trailing
+     punctuation chopped) that feeds the "Title Proper (Browse)" metabib field
+     created later in this file.] -->
$$ WHERE name = 'mods32'; + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry ( rid BIGINT, default_joiner TEXT ) RETURNS SETOF metabib.field_entry_template AS $func$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data ise returned from oils_xpath (above) in UTF-8, not entity encoded + REGEXP_REPLACE( -- This escapes embeded [^<]+)(<)([^>]+<)$re$, + E'\\1<\\3', + 'g' + ), + '&(?!amp;)', + '&', + 'g' + ), + E'\\s+', + ' ', + 'g' + ) + ), ' '), ''), + joiner + ); + + CONTINUE WHEN curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g')); + output_row.sort_value := + public.naco_normalize(sort_value); + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + -- Returning browse rows with search_field = true for search+browse + -- configs allows us to retain granularity of being able to search + -- browse fields with "starts with" type operators (for example, for + -- titles of songs in music albums) + IF idx.search_field THEN + output_row.search_field = TRUE; + END IF; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + output_row.search_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g')); + + output_row.facet_field = TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')); + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$func$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries( bib_id BIGINT, skip_facet BOOL DEFAULT FALSE, skip_browse BOOL DEFAULT FALSE, skip_search BOOL DEFAULT FALSE ) RETURNS VOID AS $func$ +DECLARE + fclass RECORD; + 
ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +-- Don't use Title Proper search field as the browse field +UPDATE config.metabib_field SET browse_field = FALSE, browse_xpath = NULL, browse_sort_xpath = NULL WHERE id = 6; + +-- Create a new Title Proper browse config +INSERT INTO config.metabib_field ( id, field_class, name, label, format, xpath, search_field, authority_xpath, browse_field, browse_sort_xpath ) VALUES + (31, 'title', 'browse', oils_i18n_gettext(31, 'Title Proper (Browse)', 'cmf', 'label'), 'mods32', $$//mods32:mods/mods32:titleBrowse$$, FALSE, '//@xlink:href', TRUE, $$*[local-name() != "nonSort"]$$ ); + +COMMIT; + +-- \qecho This is a browse-only reingest of your bib records. It may take a while. +-- \qecho You may cancel now without losing the effect of the rest of the +-- \qecho upgrade script, and arrange the reingest later. +-- \qecho . +-- SELECT metabib.reingest_metabib_field_entries(id, TRUE, FALSE, TRUE) +-- FROM biblio.record_entry; diff --git a/KCLS/sql/browse/007.0845.schema.browse_pivots_stable.sql b/KCLS/sql/browse/007.0845.schema.browse_pivots_stable.sql new file mode 100644 index 0000000000..b4ab854af0 --- /dev/null +++ b/KCLS/sql/browse/007.0845.schema.browse_pivots_stable.sql @@ -0,0 +1,8 @@ +BEGIN; + +ALTER FUNCTION metabib.browse_pivot (integer[], text) STABLE; +ALTER FUNCTION metabib.browse_bib_pivot (integer[], text) STABLE; +ALTER FUNCTION metabib.browse_authority_pivot (integer[], text) STABLE; +ALTER FUNCTION metabib.browse_authority_refs_pivot (integer[], text) STABLE; + +COMMIT; \ No newline at end of file diff --git a/KCLS/sql/browse/008.0846.function.vand-add_field.sql b/KCLS/sql/browse/008.0846.function.vand-add_field.sql new file mode 100644 index 0000000000..f1cd1fa61f --- /dev/null +++ b/KCLS/sql/browse/008.0846.function.vand-add_field.sql @@ -0,0 +1,81 @@ +BEGIN; + +CREATE OR REPLACE FUNCTION vandelay.add_field ( target_xml TEXT, source_xml TEXT, field TEXT, force_add INT ) RETURNS TEXT AS $_$ + + use MARC::Record; + use MARC::File::XML (BinaryEncoding => 'UTF-8'); + use MARC::Charset; + use strict; + + MARC::Charset->assume_unicode(1); + + my $target_xml = shift; + my $source_xml = shift; + my $field_spec = shift; + my $force_add = shift || 0; + + my $target_r = MARC::Record->new_from_xml( $target_xml ); + my $source_r = MARC::Record->new_from_xml( $source_xml ); + + return $target_xml unless ($target_r && $source_r); + + my @field_list = split(',', $field_spec); + + my %fields; + for my $f (@field_list) { + $f =~ s/^\s*//; $f =~ s/\s*$//; + if ($f =~ /^(.{3})(\w*)(?:\[([^]]*)\])?$/) { + my $field = $1; + $field =~ s/\s+//; + my $sf = $2; + $sf =~ s/\s+//; + my $match = $3; + $match =~ s/^\s*//; $match =~ s/\s*$//; + $fields{$field} = { sf => [ split('', $sf) ] }; + if ($match) { + my ($msf,$mre) = split('~', $match); + if (length($msf) > 0 and length($mre) > 0) { + $msf =~ s/^\s*//; $msf =~ s/\s*$//; + 
$mre =~ s/^\s*//; $mre =~ s/\s*$//; + $fields{$field}{match} = { sf => $msf, re => qr/$mre/ }; + } + } + } + } + + for my $f ( keys %fields) { + if ( @{$fields{$f}{sf}} ) { + for my $from_field ($source_r->field( $f )) { + my @tos = $target_r->field( $f ); + if (!@tos) { + next if (exists($fields{$f}{match}) and !$force_add); + my @new_fields = map { $_->clone } $source_r->field( $f ); + $target_r->insert_fields_ordered( @new_fields ); + } else { + for my $to_field (@tos) { + if (exists($fields{$f}{match})) { + next unless (grep { $_ =~ $fields{$f}{match}{re} } $to_field->subfield($fields{$f}{match}{sf})); + } + for my $old_sf ($from_field->subfields) { + $to_field->add_subfields( @$old_sf ) if grep(/$$old_sf[0]/,@{$fields{$f}{sf}}); + } + } + } + } + } else { + my @new_fields = map { $_->clone } $source_r->field( $f ); + $target_r->insert_fields_ordered( @new_fields ); + } + } + + $target_xml = $target_r->as_xml_record; + $target_xml =~ s/^<\?.+?\?>$//mo; + $target_xml =~ s/\n//sgo; + $target_xml =~ s/>\s+ ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.truncated_sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/011.kmain538.replace_metabib_reingest_metabib_field_entries.sql b/KCLS/sql/browse/011.kmain538.replace_metabib_reingest_metabib_field_entries.sql new file mode 100644 index 0000000000..624d436ad5 --- /dev/null +++ b/KCLS/sql/browse/011.kmain538.replace_metabib_reingest_metabib_field_entries.sql @@ -0,0 +1,104 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE 
NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/012.kmain553.Author.SQL b/KCLS/sql/browse/012.kmain553.Author.SQL new file mode 100644 index 0000000000..c00c6fb4e4 --- /dev/null +++ b/KCLS/sql/browse/012.kmain553.Author.SQL @@ -0,0 +1,142 @@ +-- STEP 1: Create entry table + +-- Table: metabib.browse_author_entry + +-- DROP TABLE metabib.browse_author_entry_def_map; + +-- DROP TABLE metabib.browse_author_entry; + +CREATE TABLE metabib.browse_author_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_author_entry + OWNER TO evergreen; + +-- Index: metabib.browse_author_entry_sort_value_idx + +-- DROP INDEX metabib.browse_author_entry_sort_value_idx; + +CREATE INDEX browse_author_entry_sort_value_idx + ON metabib.browse_author_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_author_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_author_entry_truncated_sort_value_idx; + +CREATE INDEX browse_author_entry_truncated_sort_value_idx + ON metabib.browse_author_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_author_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_author_entry_index_vector_idx; + +CREATE INDEX metabib_browse_author_entry_index_vector_idx + ON metabib.browse_author_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_author_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_author_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_author_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_author_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_author_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'author'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_author_entry +ADD CONSTRAINT browse_author_entry_pkey PRIMARY KEY (id); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_author_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_author_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT 
browse_author_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_author_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_author_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_author_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_author_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_author_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_author_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_author_entry_def_map_def_idx; + +CREATE INDEX browse_author_entry_def_map_def_idx + ON metabib.browse_author_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_author_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_author_entry_def_map_entry_idx; + +CREATE INDEX browse_author_entry_def_map_entry_idx + ON metabib.browse_author_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_author_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_author_entry_def_map_source_idx; + +CREATE INDEX browse_author_entry_def_map_source_idx + ON metabib.browse_author_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_author_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'author'); + diff --git a/KCLS/sql/browse/013.kmain553.Series.SQL b/KCLS/sql/browse/013.kmain553.Series.SQL new file mode 100644 index 0000000000..e607570601 --- /dev/null +++ b/KCLS/sql/browse/013.kmain553.Series.SQL @@ -0,0 +1,142 @@ +-- STEP 1: Create entry table + +-- Table: metabib.browse_series_entry + +-- DROP TABLE metabib.browse_series_entry_def_map; + +-- DROP TABLE metabib.browse_series_entry; + +CREATE TABLE metabib.browse_series_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_series_entry + OWNER TO evergreen; + +-- Index: metabib.browse_series_entry_sort_value_idx + +-- DROP INDEX metabib.browse_series_entry_sort_value_idx; + +CREATE INDEX browse_series_entry_sort_value_idx + ON metabib.browse_series_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_series_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_series_entry_truncated_sort_value_idx; + +CREATE INDEX browse_series_entry_truncated_sort_value_idx + ON metabib.browse_series_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_series_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_series_entry_index_vector_idx; + +CREATE INDEX metabib_browse_series_entry_index_vector_idx + ON metabib.browse_series_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_series_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_series_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_series_entry_fti_trigger + 
BEFORE INSERT OR UPDATE + ON metabib.browse_series_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_series_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'series'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_series_entry +ADD CONSTRAINT browse_series_entry_pkey PRIMARY KEY (id); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_series_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_series_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_series_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_series_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_series_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_series_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_series_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_series_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_series_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_series_entry_def_map_def_idx; + +CREATE INDEX browse_series_entry_def_map_def_idx + ON metabib.browse_series_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_series_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_series_entry_def_map_entry_idx; + +CREATE INDEX browse_series_entry_def_map_entry_idx + ON metabib.browse_series_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_series_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_series_entry_def_map_source_idx; + +CREATE INDEX browse_series_entry_def_map_source_idx + ON metabib.browse_series_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_series_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'series'); + diff --git a/KCLS/sql/browse/014.kmain553.Subject.SQL b/KCLS/sql/browse/014.kmain553.Subject.SQL new file mode 100644 index 0000000000..2c0ab56327 --- /dev/null +++ b/KCLS/sql/browse/014.kmain553.Subject.SQL @@ -0,0 +1,142 @@ +-- STEP 1: Create entry table + +-- Table: metabib.browse_subject_entry + +-- DROP TABLE metabib.browse_subject_entry_def_map; + +-- DROP TABLE metabib.browse_subject_entry; + +CREATE TABLE metabib.browse_subject_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_subject_entry + OWNER TO evergreen; + +-- Index: metabib.browse_subject_entry_sort_value_idx + +-- 
DROP INDEX metabib.browse_subject_entry_sort_value_idx; + +CREATE INDEX browse_subject_entry_sort_value_idx + ON metabib.browse_subject_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_subject_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_subject_entry_truncated_sort_value_idx; + +CREATE INDEX browse_subject_entry_truncated_sort_value_idx + ON metabib.browse_subject_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_subject_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_subject_entry_index_vector_idx; + +CREATE INDEX metabib_browse_subject_entry_index_vector_idx + ON metabib.browse_subject_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_subject_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_subject_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_subject_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_subject_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_subject_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'subject'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_subject_entry +ADD CONSTRAINT browse_subject_entry_pkey PRIMARY KEY (id); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_subject_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_subject_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_subject_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_subject_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_subject_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_subject_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_subject_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_subject_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_subject_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_subject_entry_def_map_def_idx; + +CREATE INDEX browse_subject_entry_def_map_def_idx + ON metabib.browse_subject_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_subject_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_subject_entry_def_map_entry_idx; + +CREATE INDEX browse_subject_entry_def_map_entry_idx + ON metabib.browse_subject_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_subject_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_subject_entry_def_map_source_idx; + +CREATE INDEX browse_subject_entry_def_map_source_idx + ON metabib.browse_subject_entry_def_map + USING btree + (source ); + +-- 
STEP 5: Populate def_map table + +INSERT INTO metabib.browse_subject_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'subject'); + diff --git a/KCLS/sql/browse/015.kmain553.Title.SQL b/KCLS/sql/browse/015.kmain553.Title.SQL new file mode 100644 index 0000000000..1371c241aa --- /dev/null +++ b/KCLS/sql/browse/015.kmain553.Title.SQL @@ -0,0 +1,142 @@ +-- STEP 1: Create entry table + +-- Table: metabib.browse_title_entry + +-- DROP TABLE metabib.browse_title_entry_def_map; + +-- DROP TABLE metabib.browse_title_entry; + +CREATE TABLE metabib.browse_title_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_title_entry + OWNER TO evergreen; + +-- Index: metabib.browse_title_entry_sort_value_idx + +-- DROP INDEX metabib.browse_title_entry_sort_value_idx; + +CREATE INDEX browse_title_entry_sort_value_idx + ON metabib.browse_title_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_title_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_title_entry_truncated_sort_value_idx; + +CREATE INDEX browse_title_entry_truncated_sort_value_idx + ON metabib.browse_title_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_title_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_title_entry_index_vector_idx; + +CREATE INDEX metabib_browse_title_entry_index_vector_idx + ON metabib.browse_title_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_title_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_title_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_title_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_title_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_title_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'title'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_title_entry +ADD CONSTRAINT browse_title_entry_pkey PRIMARY KEY (id); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_title_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_title_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_title_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_title_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_title_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_title_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT 
browse_title_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_title_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_title_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_title_entry_def_map_def_idx; + +CREATE INDEX browse_title_entry_def_map_def_idx + ON metabib.browse_title_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_title_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_title_entry_def_map_entry_idx; + +CREATE INDEX browse_title_entry_def_map_entry_idx + ON metabib.browse_title_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_title_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_title_entry_def_map_source_idx; + +CREATE INDEX browse_title_entry_def_map_source_idx + ON metabib.browse_title_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_title_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'title'); + diff --git a/KCLS/sql/browse/016.kmain553.metabib_browse_function.SQL b/KCLS/sql/browse/016.kmain553.metabib_browse_function.SQL new file mode 100644 index 0000000000..e4ac9ad8b7 --- /dev/null +++ b/KCLS/sql/browse/016.kmain553.metabib_browse_function.SQL @@ -0,0 +1,376 @@ +-- browse_authority_refs_pivot -------------------------------------------------------------------- + +DROP FUNCTION metabib.browse_authority_refs_pivot(integer[], text); + +-- author + +CREATE OR REPLACE FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_author_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + +-- title + +CREATE OR REPLACE FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_title_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + +-- subject + +CREATE OR REPLACE FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_subject_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN 
authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + +-- series + +CREATE OR REPLACE FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_series_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + +-- metabib.browse_bib_pivot(integer[], text) ------------------------------------------------------ + +DROP FUNCTION metabib.browse_bib_pivot(integer[], text); + +-- author + +CREATE OR REPLACE FUNCTION metabib.browse_author_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_author_entry mbe + JOIN metabib.browse_author_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_bib_pivot(integer[], text) + OWNER TO evergreen; + +-- title + +CREATE OR REPLACE FUNCTION metabib.browse_title_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_title_entry mbe + JOIN metabib.browse_title_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_bib_pivot(integer[], text) + OWNER TO evergreen; + +-- subject + +CREATE OR REPLACE FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_subject_entry mbe + JOIN metabib.browse_subject_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + OWNER TO evergreen; + +-- series + +CREATE OR REPLACE FUNCTION metabib.browse_series_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_series_entry mbe + JOIN metabib.browse_series_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_bib_pivot(integer[], text) + OWNER TO evergreen; + + +-- browse_pivot -------------------------------------------------------------------- + +DROP FUNCTION metabib.browse_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_author_pivot(search_field integer[], browse_term text) 
+ RETURNS bigint AS +$BODY$ + + SELECT id FROM metabib.browse_author_entry + WHERE id IN ( + metabib.browse_author_bib_pivot(search_field, browse_term), + metabib.browse_author_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_pivot(integer[], text) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION metabib.browse_title_pivot(search_field integer[], browse_term text) + RETURNS bigint AS +$BODY$ + + SELECT id FROM metabib.browse_title_entry + WHERE id IN ( + metabib.browse_title_bib_pivot(search_field, browse_term), + metabib.browse_title_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_pivot(integer[], text) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION metabib.browse_subject_pivot(search_field integer[], browse_term text) + RETURNS bigint AS +$BODY$ + + SELECT id FROM metabib.browse_subject_entry + WHERE id IN ( + metabib.browse_subject_bib_pivot(search_field, browse_term), + metabib.browse_subject_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_pivot(integer[], text) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION metabib.browse_series_pivot(search_field integer[], browse_term text) + RETURNS bigint AS +$BODY$ + + SELECT id FROM metabib.browse_series_entry + WHERE id IN ( + metabib.browse_series_bib_pivot(search_field, browse_term), + metabib.browse_series_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_pivot(integer[], text) + OWNER TO evergreen; + + + +-- Function: metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + +-- DROP FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer); + +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + + + -- Get search field int list with search_class + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. 
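+    -- Illustrative note (values are hypothetical, not taken from this changeset):
+    -- a 'title' browse for 'harry potter', where the title-class field IDs
+    -- collected above include 6 and 31, would take the branch below and find
+    -- its starting point roughly as
+    --   SELECT metabib.browse_title_pivot(ARRAY[6,31], 'harry potter');
+    -- i.e. the lowest-sorting metabib.browse_title_entry row at or after the
+    -- normalized term, considering both bib-linked and authority-linked entries.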
+ IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + END CASE; + END IF; + + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value FROM metabib.browse_entry WHERE id = pivot_id; --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.truncated_sort_value <= ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. 
+ forward_query := core_query || + ' mbe.truncated_sort_value > ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/017.kmain553.metabib.staged_browse_function.SQL b/KCLS/sql/browse/017.kmain553.metabib.staged_browse_function.SQL new file mode 100644 index 0000000000..0c8adeeed0 --- /dev/null +++ b/KCLS/sql/browse/017.kmain553.metabib.staged_browse_function.SQL @@ -0,0 +1,237 @@ +-- Function: metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer) + +-- DROP FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer); + +CREATE OR REPLACE FUNCTION metabib.staged_browse(query text, fields integer[], context_org integer, context_locations integer[], staff boolean, browse_superpage_size integer, count_up_from_zero boolean, result_limit integer, next_pivot_pos integer, search_class text) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + CASE search_class + WHEN 'author' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + 
ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_author_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'title' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_title_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'subject' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_subject_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'series' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_series_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + ELSE + END CASE; + + + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. 
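+                -- For example (sizes hypothetical): with a superpage size of
+                -- 100 and 250 candidate records, the loop tests slices
+                -- [1:100], [101:200] and [201:300], stopping early as soon as
+                -- one slice turns up a visible record.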
+ + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. + row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/018.kmain553.metabib.reingest_metabib_field_entries.SQL b/KCLS/sql/browse/018.kmain553.metabib.reingest_metabib_field_entries.SQL new file mode 100644 index 0000000000..92e9363f55 --- /dev/null +++ b/KCLS/sql/browse/018.kmain553.metabib.reingest_metabib_field_entries.SQL @@ -0,0 +1,174 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND 
THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + + + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND 
sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/020.kmain212.metabib.staged_browse.SQL b/KCLS/sql/browse/020.kmain212.metabib.staged_browse.SQL new file mode 100644 index 0000000000..2900b15932 --- /dev/null +++ b/KCLS/sql/browse/020.kmain212.metabib.staged_browse.SQL @@ -0,0 +1,246 @@ +-- Function: metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + +-- DROP FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text); + +CREATE OR REPLACE FUNCTION metabib.staged_browse(query text, fields integer[], context_org integer, context_locations integer[], staff boolean, browse_superpage_size integer, count_up_from_zero boolean, result_limit integer, next_pivot_pos integer, search_class text) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN 
authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + CASE search_class + WHEN 'author' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_author_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'title' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_title_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'subject' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_subject_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'series' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_series_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'call_number' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_call_number_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + ELSE + + END CASE; + + + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. 
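+            -- The flag below is TRUE when no superpage cap is configured, or
+            -- the cap is at least as large as the candidate set, i.e. the
+            -- whole set fit within a single superpage slice.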
+ result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. + + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. 
+ row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/021.kmain212.biblio.extract_metabib_field_entry.SQL b/KCLS/sql/browse/021.kmain212.biblio.extract_metabib_field_entry.SQL new file mode 100644 index 0000000000..0aa670c137 --- /dev/null +++ b/KCLS/sql/browse/021.kmain212.biblio.extract_metabib_field_entry.SQL @@ -0,0 +1,208 @@ +-- Function: biblio.extract_metabib_field_entry(bigint, text) + +-- DROP FUNCTION biblio.extract_metabib_field_entry(bigint, text); + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry(rid bigint, default_joiner text) + RETURNS SETOF metabib.field_entry_template AS +$BODY$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data is returned from oils_xpath (above) in UTF-8, not entity encoded
+                        REGEXP_REPLACE( -- This escapes embedded <s
+                            xml_node,
+                            $re$(>[^<]+)(<)([^>]+<)$re$,
+                            E'\\1&lt;\\3',
+                            'g'
+                        ),
+                        '&(?!amp;)',
+                        '&amp;',
+                        'g'
+                    ),
+                    E'\\s+',
+                    ' ',
+                    'g'
+                )
+            ), ' '), ''),
+            joiner
+        );
+
+            CONTINUE WHEN curr_text IS NULL OR curr_text = '';
+
+            IF raw_text IS NOT NULL THEN
+                raw_text := raw_text || joiner;
+            END IF;
+
+            raw_text := COALESCE(raw_text,'') || curr_text;
+
+            -- autosuggest/metabib.browse_entry
+            IF idx.browse_field THEN
+
+                IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN
+                    browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] );
+                ELSE
+                    browse_text := curr_text;
+                END IF;
+
+                IF idx.browse_sort_xpath IS NOT NULL AND
+                    idx.browse_sort_xpath <> '' THEN
+
+                    sort_value := oils_xpath_string(
+                        idx.browse_sort_xpath, xml_node, joiner,
+                        ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]]
+                    );
+                ELSE
+                    sort_value := browse_text;
+                END IF;
+
+                output_row.field_class = idx.field_class;
+                output_row.field = idx.id;
+                output_row.source = rid;
+                output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g'));
+                output_row.sort_value :=
+                    public.naco_normalize(sort_value);
+
+                output_row.authority := NULL;
+
+                IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN
+                    authority_text := oils_xpath_string(
+                        idx.authority_xpath, xml_node, joiner,
+                        ARRAY[
+                            ARRAY[xfrm.prefix, xfrm.namespace_uri],
+                            ARRAY['xlink','http://www.w3.org/1999/xlink']
+                        ]
+                    );
+
+                    IF authority_text ~ '^\d+$' THEN
+                        authority_link := authority_text::BIGINT;
+                        PERFORM * FROM authority.record_entry WHERE id = authority_link;
+                        IF FOUND THEN
+                            output_row.authority := authority_link;
+                        END IF;
+                    END IF;
+
+                END IF;
+
+                output_row.browse_field = TRUE;
+                -- Returning browse rows with search_field = true for search+browse
+                -- configs allows us to retain granularity of being able to search
+                -- browse fields with "starts with" type operators (for example, for
+                -- titles of songs in music albums)
+                IF idx.search_field THEN
+                    output_row.search_field = TRUE;
+                END IF;
+                RETURN NEXT output_row;
+                output_row.browse_field = FALSE;
+                output_row.search_field = FALSE;
+                output_row.sort_value := NULL;
+            END IF;
+
+            -- insert raw node text for faceting
+            IF idx.facet_field THEN
+
+                IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN
+                    facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] );
+                ELSE
+                    facet_text := curr_text;
+                END IF;
+
+                output_row.field_class = idx.field_class;
+                output_row.field = -1 * idx.id;
+                output_row.source = rid;
+                output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g'));
+
+                output_row.facet_field = TRUE;
+                RETURN NEXT output_row;
+                output_row.facet_field = FALSE;
+            END IF;
+
+        END LOOP;
+
+        CONTINUE WHEN raw_text IS NULL OR raw_text = '';
+
+        -- insert combined node text for searching
+        IF idx.search_field THEN
+
+            IF idx.field_class = 'identifier' AND idx.name = 'bibcn' THEN
+                output_row.field_class = 'call_number';
+                output_row.browse_field = TRUE;
+                output_row.sort_value = lower(BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')));
+
+            ELSE
+                output_row.field_class = idx.field_class;
+
+            END IF;
+
+            output_row.field = idx.id;
+            output_row.source = rid;
+            output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g'));
+
+            output_row.search_field = TRUE;
+            RETURN NEXT output_row;
+            output_row.search_field = FALSE;
+        END IF;
+
+    END LOOP;
+
+END;
+
+$BODY$
+ 
LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION biblio.extract_metabib_field_entry(bigint, text) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/022.kmain212.metabib.reingest_metabib_field_entries.SQL b/KCLS/sql/browse/022.kmain212.metabib.reingest_metabib_field_entries.SQL new file mode 100644 index 0000000000..c7c0f08a6b --- /dev/null +++ b/KCLS/sql/browse/022.kmain212.metabib.reingest_metabib_field_entries.SQL @@ -0,0 +1,193 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
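+        -- The CASE below follows a find-or-create pattern for each browse
+        -- class: look up an existing metabib.browse_<class>_entry row by
+        -- (value, sort_value); if none exists, insert one and fetch its id
+        -- via CURRVAL on that class's sequence, then record the linkage in
+        -- metabib.browse_<class>_entry_def_map. Illustrative shape for one
+        -- class only:
+        --   SELECT id FROM metabib.browse_title_entry
+        --    WHERE value = value_prepped AND sort_value = ind_data.sort_value;
+        --   -- if no row: INSERT the entry, then CURRVAL('metabib.browse_title_entry_id_seq')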
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + + + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/023.kmain212.Call_Number.SQL b/KCLS/sql/browse/023.kmain212.Call_Number.SQL new file mode 100644 index 0000000000..4e76ddf596 --- /dev/null +++ b/KCLS/sql/browse/023.kmain212.Call_Number.SQL @@ -0,0 +1,223 @@ +-- STEP 1: Create entry table + +-- Table: metabib.browse_call_number_entry + +-- DROP TABLE metabib.browse_call_number_entry_def_map; + +-- DROP TABLE metabib.browse_call_number_entry; + +CREATE TABLE metabib.browse_call_number_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_call_number_entry + OWNER TO evergreen; + +-- Index: metabib.browse_call_number_entry_sort_value_idx + +-- DROP INDEX metabib.browse_call_number_entry_sort_value_idx; + +CREATE INDEX browse_call_number_entry_sort_value_idx + ON metabib.browse_call_number_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_call_number_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_call_number_entry_truncated_sort_value_idx; + +CREATE INDEX browse_call_number_entry_truncated_sort_value_idx + ON metabib.browse_call_number_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_call_number_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_call_number_entry_index_vector_idx; + +CREATE INDEX metabib_browse_call_number_entry_index_vector_idx + ON metabib.browse_call_number_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_call_number_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_call_number_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_call_number_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_call_number_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_call_number_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.id = 25); -- identifier, bibcn + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_call_number_entry +ADD CONSTRAINT browse_call_number_entry_pkey PRIMARY KEY (id); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_call_number_entry_def_map +( + id bigserial NOT NULL, 
+ entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_call_number_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_call_number_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_call_number_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_call_number_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_call_number_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_call_number_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_call_number_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_call_number_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_call_number_entry_def_map_def_idx; + +CREATE INDEX browse_call_number_entry_def_map_def_idx + ON metabib.browse_call_number_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_call_number_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_call_number_entry_def_map_entry_idx; + +CREATE INDEX browse_call_number_entry_def_map_entry_idx + ON metabib.browse_call_number_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_call_number_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_call_number_entry_def_map_source_idx; + +CREATE INDEX browse_call_number_entry_def_map_source_idx + ON metabib.browse_call_number_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_call_number_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.id = 25); -- identifier, bibcn + + +-- STEP 6: metabib.call_number_field_entry + +-- DROP TABLE metabib.call_number_field_entry; + +CREATE TABLE metabib.call_number_field_entry +( + id bigserial NOT NULL, + source bigint NOT NULL, + field integer NOT NULL, + value text NOT NULL, + index_vector tsvector NOT NULL, + CONSTRAINT call_number_field_entry_pkey PRIMARY KEY (id ) +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.call_number_field_entry + OWNER TO evergreen; +GRANT ALL ON TABLE metabib.call_number_field_entry TO evergreen; +GRANT ALL ON TABLE metabib.call_number_field_entry TO bbonner; +GRANT SELECT ON TABLE metabib.call_number_field_entry TO biblio; + +-- Index: metabib.metabib_call_number_field_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_call_number_field_entry_index_vector_idx; + +CREATE INDEX metabib_call_number_field_entry_index_vector_idx + ON metabib.call_number_field_entry + USING gist + (index_vector ); + +-- Index: metabib.metabib_call_number_field_entry_source_idx + +-- DROP INDEX metabib.metabib_call_number_field_entry_source_idx; + +CREATE INDEX metabib_call_number_field_entry_source_idx + ON metabib.call_number_field_entry + USING btree + (source ); + +-- Index: metabib.metabib_call_number_field_entry_value_idx + +-- DROP INDEX metabib.metabib_call_number_field_entry_value_idx; + +CREATE INDEX metabib_call_number_field_entry_value_idx + ON metabib.call_number_field_entry + USING btree + ("substring"(value, 1, 1024) COLLATE pg_catalog."default" ) + WHERE 
index_vector = ''::tsvector;
+
+
+-- Trigger: metabib_call_number_field_entry_fti_trigger on metabib.call_number_field_entry
+
+-- DROP TRIGGER metabib_call_number_field_entry_fti_trigger ON metabib.call_number_field_entry;
+
+CREATE TRIGGER metabib_call_number_field_entry_fti_trigger
+  BEFORE INSERT OR UPDATE
+  ON metabib.call_number_field_entry
+  FOR EACH ROW
+  EXECUTE PROCEDURE public.oils_tsearch2('call_number');
+
+-- Trigger: normalize_field_entry on metabib.call_number_field_entry
+
+-- DROP TRIGGER normalize_field_entry ON metabib.call_number_field_entry;
+
+CREATE TRIGGER normalize_field_entry
+  AFTER INSERT OR UPDATE
+  ON metabib.call_number_field_entry
+  FOR EACH ROW
+  EXECUTE PROCEDURE metabib.normalized_field_entry_view();
+ALTER TABLE metabib.call_number_field_entry DISABLE TRIGGER normalize_field_entry;
+
+
+
+-- STEP 7: set call_number as browsable
+
+UPDATE config.metabib_field
+  SET browse_field=true
+  WHERE field_class = 'identifier' AND name = 'bibcn';
+
diff --git a/KCLS/sql/browse/024.kmain212.metabib_browse_function.SQL b/KCLS/sql/browse/024.kmain212.metabib_browse_function.SQL
new file mode 100644
index 0000000000..3e3627839e
--- /dev/null
+++ b/KCLS/sql/browse/024.kmain212.metabib_browse_function.SQL
@@ -0,0 +1,233 @@
+-- browse_authority_refs_pivot --------------------------------------------------------------------
+
+-- call_number
+
+CREATE OR REPLACE FUNCTION metabib.browse_call_number_authority_refs_pivot(text)
+  RETURNS bigint AS
+$BODY$
+    SELECT mbe.id
+      FROM metabib.browse_call_number_entry mbe
+      WHERE mbe.sort_value >= public.naco_normalize($1)
+      ORDER BY mbe.sort_value, mbe.value LIMIT 1;
+$BODY$
+  LANGUAGE sql STABLE
+  COST 100;
+ALTER FUNCTION metabib.browse_call_number_authority_refs_pivot(text)
+  OWNER TO evergreen;
+
+-- metabib.browse_bib_pivot(integer[], text) ------------------------------------------------------
+
+-- call_number
+
+CREATE OR REPLACE FUNCTION metabib.browse_call_number_bib_pivot(text)
+  RETURNS bigint AS
+$BODY$
+    SELECT mbe.id
+      FROM metabib.browse_call_number_entry mbe
+      WHERE mbe.sort_value >= public.naco_normalize($1)
+      ORDER BY mbe.sort_value, mbe.value LIMIT 1;
+$BODY$
+  LANGUAGE sql STABLE
+  COST 100;
+ALTER FUNCTION metabib.browse_call_number_bib_pivot(text)
+  OWNER TO evergreen;
+
+
+
+-- browse_pivot --------------------------------------------------------------------
+
+
+CREATE OR REPLACE FUNCTION metabib.browse_call_number_pivot(browse_term text)
+  RETURNS bigint AS
+$BODY$
+
+    SELECT id FROM metabib.browse_call_number_entry
+    WHERE id IN (
+        metabib.browse_call_number_bib_pivot(browse_term),
+        metabib.browse_call_number_authority_refs_pivot(browse_term)
+    )
+    ORDER BY sort_value, value LIMIT 1;
+$BODY$
+  LANGUAGE sql STABLE
+  COST 100;
+ALTER FUNCTION metabib.browse_call_number_pivot(text)
+  OWNER TO evergreen;
+
+
+-- Function: metabib.browse(integer[], text, integer, integer, boolean, bigint, integer)
+
+-- DROP FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer);
+
+CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10)
+  RETURNS SETOF metabib.flat_browse_entry_appearance AS
+$BODY$
+DECLARE
+    core_query TEXT;
+    back_query TEXT;
+    forward_query TEXT;
+    pivot_sort_value TEXT;
+    pivot_sort_fallback TEXT;
+    search_field INT[];
+    context_locations INT[];
+    
browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + + + + -- Get search field int list with search_class + IF search_class = 'id|bibcn' THEN + + SELECT INTO search_class 'call_number'; + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; + + IF pivot_id IS NULL THEN + + pivot_id := metabib.browse_call_number_pivot(browse_term); + + END IF; + ELSE + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + + END CASE; + END IF; + END IF; + + CASE search_class + WHEN 'author' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = pivot_id; + WHEN 'title' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_title_entry WHERE id = pivot_id; + WHEN 'subject' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_subject_entry WHERE id = pivot_id; + WHEN 'series' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_series_entry WHERE id = pivot_id; + WHEN 'call_number' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = pivot_id; + + END CASE; + + --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. 
We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.truncated_sort_value <= ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.truncated_sort_value > ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION ALL + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + + + diff --git a/KCLS/sql/browse/025.kmain936.remove_browse_entry_references.SQL b/KCLS/sql/browse/025.kmain936.remove_browse_entry_references.SQL new file mode 100644 index 0000000000..43fcc07ffa --- /dev/null +++ b/KCLS/sql/browse/025.kmain936.remove_browse_entry_references.SQL @@ -0,0 +1,334 @@ +-- This function is never referenced +DROP FUNCTION IF EXISTS browse_authority_pivot(integer[], text); + +-- Just change mbe_row to browse_author_entry instead of browse_entry +CREATE OR REPLACE FUNCTION indexing_ingest_or_delete() RETURNS trigger + LANGUAGE plpgsql + AS $_$ +DECLARE + ashps authority.simple_heading_plus%ROWTYPE; + mbe_row metabib.browse_author_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? 
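+        -- At this point the deleted authority has been unlinked from bibs
+        -- (authority.bib_linking) and its flattened fields and headings
+        -- removed (authority.full_rec, authority.simple_heading); the block
+        -- below also drops authority-to-authority links that involve it.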
+ + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashps IN SELECT * FROM authority.simple_heading_plus_set(NEW.marc) LOOP + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashps.record, ashps.atag, ashps.value, ashps.sort_value); + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- Get the search_class + SELECT INTO search_class cmf.field_class + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashps.atag; + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + 
INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$_$; + +-- Just change mbe_row to browse_author_entry instead of browse_entry +CREATE OR REPLACE FUNCTION reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) RETURNS void + LANGUAGE plpgsql + AS $_X$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_author_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.0 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. 
It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + + + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$_X$; + +-- This function is never \ No newline at end of file diff --git a/KCLS/sql/browse/KMAIN-877.sql b/KCLS/sql/browse/KMAIN-877.sql new file mode 100644 index 0000000000..9975bbd750 --- /dev/null +++ b/KCLS/sql/browse/KMAIN-877.sql @@ -0,0 +1,176 @@ +-- Function: metabib.browse(text, text, integer, integer, boolean, bigint, integer) + +-- DROP FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer); + +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + --ver1.1 updated with kmain-806 + -- Get search field int list with search_class + IF search_class = 'id|bibcn' THEN + + SELECT INTO search_class 'call_number'; + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; + + IF pivot_id IS NULL THEN + + pivot_id := metabib.browse_call_number_pivot(browse_term); + + END IF; + ELSE + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. 
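+        -- (Illustrative only: with search_class = 'author' and a browse_term of
+        --  'twain mark', the CASE below reduces to a call equivalent to
+        --      pivot_id := metabib.browse_author_pivot(search_field, 'twain mark');
+        --  where search_field holds whatever config.metabib_field IDs were
+        --  gathered for the author class above; the concrete IDs vary by site.)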
+ IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + + END CASE; + END IF; + END IF; + + CASE search_class + WHEN 'author' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = pivot_id; + WHEN 'title' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_title_entry WHERE id = pivot_id; + WHEN 'subject' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_subject_entry WHERE id = pivot_id; + WHEN 'series' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_series_entry WHERE id = pivot_id; + WHEN 'call_number' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = pivot_id; + + END CASE; + + --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + )'; + IF search_class != 'call_number' THEN + + core_query := core_query || ' OR EXISTS ( -- are there any authorities using this mbe via the requested fields? 
+ SELECT 1 + FROM metabib.browse_' || search_class || '_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + )'; + + END IF; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ') AND mbe.truncated_sort_value <= ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.truncated_sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ') AND mbe.truncated_sort_value > ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.truncated_sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION ALL + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/combined/000.remove_old_auth_data.sql b/KCLS/sql/browse/combined/000.remove_old_auth_data.sql new file mode 100644 index 0000000000..5a1eb3351f --- /dev/null +++ b/KCLS/sql/browse/combined/000.remove_old_auth_data.sql @@ -0,0 +1,16 @@ +BEGIN; + +-- Remove old authority data. Handle all foreign key constraints as well. 
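+-- (Design note: naming every dependent table in one multi-table TRUNCATE lets
+--  PostgreSQL clear them together without tripping over the foreign keys that
+--  link them; TRUNCATE ... CASCADE would also work, at the cost of leaving the
+--  affected tables implicit rather than spelled out.)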
+ +TRUNCATE TABLE + authority.bib_linking, + authority.record_note, + vandelay.authority_match, + vandelay.queued_authority_record, + authority.simple_heading, + vandelay.queued_authority_record_attr, + authority.record_entry RESTART IDENTITY; + +TRUNCATE TABLE authority.full_rec RESTART IDENTITY; + +COMMIT; diff --git a/KCLS/sql/browse/combined/0001.public.export-ids-086-092-099.sql b/KCLS/sql/browse/combined/0001.public.export-ids-086-092-099.sql new file mode 100644 index 0000000000..40b9b59e0b --- /dev/null +++ b/KCLS/sql/browse/combined/0001.public.export-ids-086-092-099.sql @@ -0,0 +1,36 @@ +-- Function: public.export_ids_086_092_099(bigint) + +-- DROP FUNCTION public.export_ids_086_092_099(bigint); + +CREATE OR REPLACE FUNCTION public.export_ids_086_092_099(record_to_check bigint) + RETURNS boolean AS +$BODY$ + +-- Returns true if a record has at least one of the following fields 086 or 092 or 099 and every instance +-- of each of these fields must not contain “ON ORDER” +DECLARE + v_row_count BIGINT; +BEGIN + + SELECT count(*) INTO v_row_count + FROM metabib.real_full_rec + WHERE (tag = '086' OR tag = '092' OR tag = '099') AND value ILIKE '%on order%' AND record = record_to_check; + + IF v_row_count > 0 THEN + RETURN FALSE; + END IF; + + SELECT count(*) INTO v_row_count + FROM metabib.real_full_rec + WHERE (tag = '086' OR tag = '092' OR tag = '099') AND value NOT ILIKE '%on order%' AND record = record_to_check; + + IF v_row_count > 0 THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + +END $BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION public.export_ids_086_092_099(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/combined/0002.public.export-ids-001.sql b/KCLS/sql/browse/combined/0002.public.export-ids-001.sql new file mode 100644 index 0000000000..1a8e1d3ad4 --- /dev/null +++ b/KCLS/sql/browse/combined/0002.public.export-ids-001.sql @@ -0,0 +1,30 @@ +-- Function: public.export_ids_001(bigint) + +-- DROP FUNCTION public.export_ids_001(bigint); + +CREATE OR REPLACE FUNCTION public.export_ids_001(record_to_check bigint) + RETURNS boolean AS +$BODY$ + +-- Returns true if the record has at least one 001 field and at least one of those fields should contain “oc” +-- OR has at least one 035 field with "WaOLN" in subfield "a" +DECLARE + v_row_count BIGINT; +BEGIN + + SELECT count(*) INTO v_row_count + FROM metabib.real_full_rec + WHERE ((tag = '001' AND (value ILIKE 'oc%' OR value ILIKE 'on%' OR value ILIKE 'wln%')) + OR (tag = '035' AND subfield ILIKE 'a' AND value ILIKE '%WaOLN%')) + AND record = record_to_check; + + IF v_row_count > 0 THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + +END $BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION public.export_ids_001(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/combined/0003.public.export-ids-998.sql b/KCLS/sql/browse/combined/0003.public.export-ids-998.sql new file mode 100644 index 0000000000..729a238bad --- /dev/null +++ b/KCLS/sql/browse/combined/0003.public.export-ids-998.sql @@ -0,0 +1,31 @@ +-- Function: public.export_ids_998(bigint) + +-- DROP FUNCTION public.export_ids_998(bigint); + +CREATE OR REPLACE FUNCTION public.export_ids_998(record_to_check bigint) + RETURNS boolean AS +$BODY$ + +-- Returns true if a record has items attached OR the record must have at least one 998 field with subfield d that has +-- one of the following values ‘d’ or ‘t’ or ‘v’ or ‘w’ or ‘x’ or ‘y’ or ‘1’ +DECLARE + v_row_count BIGINT; +BEGIN + + SELECT count(*) INTO v_row_count + FROM metabib.real_full_rec + WHERE tag = 
'998' + AND subfield = 'd' + AND value IN ('d','t','v','w','x','y','1') + AND record = record_to_check; + + IF v_row_count > 0 OR public.export_ids_has_copy(record_to_check) THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + +END $BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION public.export_ids_998(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/combined/0004.public.export-ids-has-copy.sql b/KCLS/sql/browse/combined/0004.public.export-ids-has-copy.sql new file mode 100644 index 0000000000..326d882813 --- /dev/null +++ b/KCLS/sql/browse/combined/0004.public.export-ids-has-copy.sql @@ -0,0 +1,32 @@ +-- Function: public.export_ids_has_copy(bigint) + +-- DROP FUNCTION public.export_ids_has_copy(bigint); + +CREATE OR REPLACE FUNCTION public.export_ids_has_copy(record_to_check bigint) + RETURNS boolean AS +$BODY$ + +-- Returns true if a record has a copy attached +DECLARE + v_row_count BIGINT; +BEGIN + + SELECT count(acp.id) INTO v_row_count + FROM biblio.record_entry AS bre + JOIN asset.call_number AS acn + ON bre.id = acn.record + JOIN asset.copy AS acp + ON acn.id = acp.call_number + WHERE bre.id = record_to_check; + + IF v_row_count > 0 THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + +END $BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION public.export_ids_has_copy(bigint) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/combined/0005.public.export-ids-ldr.sql b/KCLS/sql/browse/combined/0005.public.export-ids-ldr.sql new file mode 100644 index 0000000000..b624f93de0 --- /dev/null +++ b/KCLS/sql/browse/combined/0005.public.export-ids-ldr.sql @@ -0,0 +1,27 @@ +-- Function: public.export_ids_LDR(bigint) + +-- DROP FUNCTION public.export_ids_LDR(bigint); + +CREATE OR REPLACE FUNCTION public.export_ids_LDR(record_to_check bigint) + RETURNS boolean AS +$BODY$ + +-- Returns true if a record has an 'f' in deleted field of biblio.record_entry +DECLARE + export_bib BOOLEAN; +BEGIN + + SELECT deleted into export_bib + FROM biblio.record_entry + WHERE id = record_to_check; + + IF export_bib = 'f' THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + +END $BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION public.export_ids_LDR(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/combined/0006.public.export-ids-cat-date.sql b/KCLS/sql/browse/combined/0006.public.export-ids-cat-date.sql new file mode 100644 index 0000000000..0a6c22a240 --- /dev/null +++ b/KCLS/sql/browse/combined/0006.public.export-ids-cat-date.sql @@ -0,0 +1,30 @@ +-- Function: public.export_ids_cat_date(bigint, date, date) + +-- DROP FUNCTION public.export_ids_cat_date(bigint, date, date); + +CREATE OR REPLACE FUNCTION public.export_ids_cat_date(record_to_check bigint, start_date date, end_date date) + RETURNS boolean AS +$BODY$ + +-- Returns true if a record has a Cat Date that falls between start_date and end_date inclusive. 
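+-- (Hypothetical usage sketch, mirroring export_generate_ids() below:
+--      SELECT id FROM biblio.record_entry
+--       WHERE public.export_ids_cat_date(id, DATE '2015-01-01', DATE '2015-01-31');
+--  keeps bibs whose cataloging_date falls within January 2015; when either
+--  bound is NULL the function returns TRUE for every record.)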
+DECLARE + v_cat_date TIMESTAMP WITH TIME ZONE; +BEGIN + + IF start_date IS NULL OR end_date IS NULL THEN + RETURN TRUE; + ELSE + SELECT cataloging_date INTO v_cat_date + FROM biblio.record_entry + WHERE id = record_to_check; + + IF v_cat_date >= start_date AND v_cat_date <= end_date THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + END IF; +END $BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION public.export_ids_cat_date(bigint, date, date) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/combined/0007.public.export-generate-ids.sql b/KCLS/sql/browse/combined/0007.public.export-generate-ids.sql new file mode 100644 index 0000000000..191bdc5bd1 --- /dev/null +++ b/KCLS/sql/browse/combined/0007.public.export-generate-ids.sql @@ -0,0 +1,34 @@ +-- Function: public.export_generate_ids() + +-- DROP FUNCTION public.export_generate_ids(); + +CREATE OR REPLACE FUNCTION public.export_generate_ids(start_date date, end_date date) + RETURNS SETOF bigint AS +$BODY$ +-- Generates a list of ids for exporting based on the following criteria +-- For a record to be exported it MUST MEET ALL of the following conditions +-- 1.The record must have at least one LDR field and byte 05 of every instance of that field must not be ‘d’. +-- AND +-- 2. The record must have at least one 001 field and at least one of those fields should contain “oc” +-- OR has at least one 035 field with "WaOLN" in subfield "a" +-- AND +-- 3. The record must have at least one of the following fields 086 or 092 or 099 and every instance of each of these fields must not contain “ON ORDER” +-- AND +-- 4. The record must have items attached OR the record must have at least one 998 field with subfield d that has one of the following values ‘d’ or ‘t’ or ‘v’ or ‘w’ or ‘x’ or ‘y’ or ‘1’ +-- AND +-- 5. The records Cat Date must fall between two dates supplied by the user. + + + SELECT id + FROM biblio.record_entry + WHERE public.export_ids_cat_date(id, start_date, end_date) AND public.export_ids_LDR(id) AND public.export_ids_001(id) + AND public.export_ids_086_092_099(id) AND public.export_ids_998(id); + + +$BODY$ + LANGUAGE sql STABLE; +ALTER FUNCTION public.export_generate_ids(date, date) + OWNER TO evergreen; + + + diff --git a/KCLS/sql/browse/combined/001.tables.sql b/KCLS/sql/browse/combined/001.tables.sql new file mode 100644 index 0000000000..2dabaf8979 --- /dev/null +++ b/KCLS/sql/browse/combined/001.tables.sql @@ -0,0 +1,343 @@ +BEGIN; + +-- TABLE: authority.control_set_authority_field +-- To avoid problems with altering a table column after doing an update. 
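+-- (DISABLE TRIGGER ALL below suspends every trigger on this table while the
+--  new columns are added and back-filled, and the matching ENABLE TRIGGER ALL
+--  at the end of this block turns them back on, so the bulk UPDATEs run
+--  without firing per-row triggers.)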
+ALTER TABLE authority.control_set_authority_field + DISABLE TRIGGER ALL; + +ALTER TABLE authority.control_set_authority_field + ADD COLUMN linking_subfield CHAR(1); + +UPDATE authority.control_set_authority_field + SET linking_subfield = '0' WHERE main_entry IS NOT NULL; + +ALTER TABLE authority.control_set_authority_field + ADD COLUMN display_sf_list TEXT; + +UPDATE authority.control_set_authority_field + SET display_sf_list = REGEXP_REPLACE(sf_list, '[iw254]', '', 'g') + WHERE tag LIKE '5__'; + +UPDATE authority.control_set_authority_field + SET display_sf_list = REGEXP_REPLACE(sf_list, '[w254]', '', 'g') + WHERE tag NOT LIKE '5__'; + +ALTER TABLE authority.control_set_authority_field + ALTER COLUMN display_sf_list SET NOT NULL; + +ALTER TABLE authority.control_set_authority_field + ADD COLUMN joiner TEXT; + +UPDATE authority.control_set_authority_field + SET joiner = ' -- ' WHERE tag LIKE ANY (ARRAY['_4_','_5_','_8_']); + +UPDATE authority.control_set_authority_field + SET name = 'See Also' + WHERE tag IN ('500', '510', '511', '530', '550', '551', '555', '580','581', '582', '585', '548'); + +UPDATE authority.control_set_authority_field + SET name = 'See' + WHERE tag IN ('400', '410', '411', '430', '450', '451', '455', '480','481', '482', '485', '448', '100', '110', '111', '130', '150', '151', '155', '180', '181', '182', '185', '148'); + +ALTER TABLE authority.control_set_authority_field + ENABLE TRIGGER ALL; + +-- TABLE: authority.authority_linking +CREATE TABLE authority.authority_linking ( + id BIGSERIAL PRIMARY KEY, + source BIGINT REFERENCES authority.record_entry (id) NOT NULL, + target BIGINT REFERENCES authority.record_entry (id) NOT NULL, + field INT REFERENCES authority.control_set_authority_field (id) NOT NULL +); + +-- KMAIN-1312 KMAIN-1324 + +CREATE TABLE metabib.bib_export_data +( + id bigserial NOT NULL, + bib bigint NOT NULL, + export_date timestamp with time zone, + import_date timestamp with time zone, + CONSTRAINT bib_export_data_pkey PRIMARY KEY (id), + CONSTRAINT bib_export_data_bib_fkey FOREIGN KEY (bib) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.bib_export_data + OWNER TO evergreen; +GRANT ALL ON TABLE metabib.bib_export_data TO evergreen; + +-- TABLE: metabib.browse_entry_def_map +ALTER TABLE metabib.browse_entry_def_map + ADD COLUMN authority BIGINT REFERENCES authority.record_entry (id) + ON DELETE SET NULL; + +-- TABLE: config.metabib_field +ALTER TABLE config.metabib_field + ADD COLUMN authority_xpath TEXT; + +ALTER TABLE config.metabib_field + ADD COLUMN browse_sort_xpath TEXT; + +UPDATE config.metabib_field + SET authority_xpath = '//@xlink:href' + WHERE + format = 'mods32' AND + field_class IN ('subject','series','title','author') AND + browse_field IS TRUE; + +UPDATE config.metabib_field + SET authority_xpath = '//@xlink:href' + WHERE + format = 'kcls' AND + field_class IN ('subject','series','title','author') AND + browse_field IS TRUE; + +UPDATE config.metabib_field + SET browse_xpath = '//*[local-name()=''facet'']' + WHERE + field_class LIKE 'subject' AND + format LIKE 'kcls' AND + browse_field; + +UPDATE config.metabib_field + SET browse_xpath = '//*[local-name()=''facet'']' + WHERE + field_class LIKE 'author' AND + format LIKE 'kcls' AND + browse_field; + +UPDATE config.metabib_field + SET browse_xpath = '//*[local-name()=''facet'']' + WHERE + field_class LIKE 'series' AND + format LIKE 'kcls' AND + browse_field; + +ALTER TABLE config.metabib_field + 
ADD COLUMN joiner TEXT; + +UPDATE config.metabib_field + SET joiner = ' -- ' + WHERE + field_class = 'subject' AND name NOT IN ('name', 'complete'); + +-- DATA: config.metabib_field +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:relatedItem[@type="series"]/mods32:titleInfo[@type="nfi"]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'series' AND name = 'seriestitle' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and not (@type)]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL, + browse_field = TRUE +WHERE + field_class = 'title' AND name = 'proper' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='alternative-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'alternative' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='uniform-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'uniform' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='translated-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'translated' AND format = 'mods32'; + +-- This keeps extra terms like "creator" out of browse headings. +UPDATE config.metabib_field +SET + browse_xpath = $$//*[local-name()='namePart']$$ -- vim */ +WHERE + browse_field AND browse_xpath IS NULL AND field_class = 'author'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and starts-with(@type,'alternative')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'alternative' AND format = 'mods32'; + +-- Don't use Title Proper search field as the browse field +UPDATE config.metabib_field +SET + browse_field = FALSE, browse_xpath = NULL, browse_sort_xpath = NULL +WHERE + id = 6; + +-- Create a new Title Proper browse config +INSERT INTO config.metabib_field ( id, field_class, name, label, format, xpath, search_field, authority_xpath, browse_field, browse_sort_xpath ) +VALUES + (31, 'title', 'browse', oils_i18n_gettext(31, 'Title Proper (Browse)', 'cmf', 'label'), 'mods32', $$//mods32:mods/mods32:titleBrowse$$, FALSE, '//@xlink:href', TRUE, $$*[local-name() != "nonSort"]$$ ); + + +-- TABLE: metabib.field_entry_template +ALTER TYPE metabib.field_entry_template ADD ATTRIBUTE authority BIGINT; +ALTER TYPE metabib.field_entry_template ADD ATTRIBUTE sort_value TEXT; + + +-- DATA: config.global_flag +INSERT INTO config.global_flag (name, value, enabled, label) VALUES +( + 'opac.browse.warnable_regexp_per_class', + '{"title": "^(a|the|an)\\s", "series": "^(a|the|an)\\s", "author": "^(a|the|an)\\s", "subject": "^(a|the|an)\\s"}', + TRUE, + oils_i18n_gettext( + 'opac.browse.warnable_regexp_per_class', + 'Map of search classes to regular expressions to warn user about leading articles.', + 'cgf', + 'label' + ) +), +( + 'opac.browse.holdings_visibility_test_limit', + '100', + TRUE, + oils_i18n_gettext( + 'opac.browse.holdings_visibility_test_limit', + 'Don''t look for more than this number of records with holdings 
when displaying browse headings with visible record counts.', + 'cgf', + 'label' + ) +); + + +-- TABLE:metabib.browse_entry +-- Section 10: metabib.browse_entry and metabib.browse_entry_def_map + +ALTER TABLE metabib.browse_entry DROP CONSTRAINT browse_entry_value_key; +ALTER TABLE metabib.browse_entry ADD COLUMN sort_value TEXT; +DELETE FROM metabib.browse_entry_def_map; -- Yeah. +DELETE FROM metabib.browse_entry; +ALTER TABLE metabib.browse_entry ALTER COLUMN sort_value SET NOT NULL; +DROP TRIGGER IF EXISTS mbe_sort_value ON metabib.browse_entry; + +/* CREATE INDEX browse_entry_sort_value_idx + ON metabib.browse_entry USING BTREE (sort_value); */ + +-- NOTE If I understand ordered indices correctly, an index on sort_value DESC +-- is not actually needed, even though we do have a query that does ORDER BY +-- on this column in that direction. The previous index serves for both +-- directions, and ordering in an index is only helpful for multi-column +-- indices, I think. See http://www.postgresql.org/docs/9.1/static/indexes-ordering.html + +-- CREATE INDEX CONCURRENTLY browse_entry_sort_value_idx_desc +-- ON metabib.browse_entry USING BTREE (sort_value DESC); + +-- Add new column to browse_entry +ALTER TABLE metabib.browse_entry ADD COLUMN truncated_sort_value text; + +-- Fill new column with truncated values from sort_value +UPDATE metabib.browse_entry SET truncated_sort_value = substr(sort_value, 1, 2700); + +-- Index the new column +CREATE INDEX browse_entry_truncated_sort_value_idx + ON metabib.browse_entry + USING btree (truncated_sort_value COLLATE pg_catalog."default"); + +--Remove the old index on sort_value +-- DROP INDEX metabib.browse_entry_sort_value_idx; + + +-- TABLE: authority.control_set_bib_field_metabib_field_map +-- Seed data will be generated from class <-> axis mapping +CREATE TABLE authority.control_set_bib_field_metabib_field_map ( + id SERIAL PRIMARY KEY, + bib_field INT NOT NULL REFERENCES authority.control_set_bib_field (id) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + metabib_field INT NOT NULL REFERENCES config.metabib_field (id) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + CONSTRAINT a_bf_mf_map_once UNIQUE (bib_field, metabib_field) +); + + +-- VIEWS: authority.control_set_bib_field_metabib_field_map +CREATE VIEW authority.control_set_auth_field_metabib_field_map_main AS + SELECT DISTINCT b.authority_field, m.metabib_field + FROM authority.control_set_bib_field_metabib_field_map m JOIN authority.control_set_bib_field b ON (b.id = m.bib_field); +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_main IS $$metabib fields for main entry auth fields$$; + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_refs_only AS + SELECT DISTINCT a.id AS authority_field, m.metabib_field + FROM authority.control_set_authority_field a + JOIN authority.control_set_authority_field ame ON (a.main_entry = ame.id) + JOIN authority.control_set_bib_field b ON (b.authority_field = ame.id) + JOIN authority.control_set_bib_field_metabib_field_map mf ON (mf.bib_field = b.id) + JOIN authority.control_set_auth_field_metabib_field_map_main m ON (ame.id = m.authority_field); +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_refs_only IS $$metabib fields for NON-main entry auth fields$$; + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_refs AS + SELECT * FROM authority.control_set_auth_field_metabib_field_map_main + UNION + SELECT * FROM 
authority.control_set_auth_field_metabib_field_map_refs_only; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_refs IS $$metabib fields for all auth fields$$; + +-- blind refs only is probably what we want for lookup in bib/auth browse +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_refs_only AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_refs_only r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_refs_only IS $$metabib fields for NON-main entry auth fields that can't be linked to other records$$; -- ' + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_refs AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_refs r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_refs IS $$metabib fields for all auth fields that can't be linked to other records$$; -- ' + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_main AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_main r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_main IS $$metabib fields for main entry auth fields that can't be linked to other records$$; -- ' + + +--This table is not needed because of the separate tables created to replace it. KMAIN-806 +--TABLE: metabib.browse_entry_simple_heading_map +--CREATE TABLE metabib.browse_entry_simple_heading_map ( +-- id BIGSERIAL PRIMARY KEY, +-- entry BIGINT REFERENCES metabib.browse_entry (id), +-- simple_heading BIGINT REFERENCES authority.simple_heading (id) ON DELETE CASCADE +--); +--CREATE INDEX browse_entry_sh_map_entry_idx ON metabib.browse_entry_simple_heading_map (entry); +--CREATE INDEX browse_entry_sh_map_sh_idx ON metabib.browse_entry_simple_heading_map (simple_heading); + + +-- KMAIN-821 +-- Add a type, authority.simple_heading_plus, for returning the original value from the marc field. 
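+-- (The original_text attribute carries the heading as it appears in the MARC
+--  field, alongside the normalized value/sort_value pair; the ingest trigger
+--  above loops over authority.simple_heading_plus_set(NEW.marc) rows of this
+--  type and stores ashps.original_text as the browse entry's display value.)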
+CREATE TYPE authority.simple_heading_plus
+    AS (id bigint,
+        record bigint,
+        atag integer,
+        value text,
+        sort_value text,
+        index_vector tsvector,
+        original_text text);
+
+
+COMMIT;
diff --git a/KCLS/sql/browse/combined/002.large_update_to_config_xml_transform.sql b/KCLS/sql/browse/combined/002.large_update_to_config_xml_transform.sql
new file mode 100644
index 0000000000..1d60217f9a
--- /dev/null
+++ b/KCLS/sql/browse/combined/002.large_update_to_config_xml_transform.sql
@@ -0,0 +1,6813 @@
+BEGIN;
+
+-- mods32 from 006
+-- 953.data.MODS32-xsl.sql
+UPDATE config.xml_transform SET xslt=$$
+    [MODS32 XSLT stylesheet body omitted: the web rendering stripped the
+     embedded XML markup, leaving only stray text nodes, so the stylesheet
+     cannot be reproduced here]
+$$ WHERE name = 'mods32';
+
+-- 954.data.MODS33-xsl.sql
+UPDATE config.xml_transform SET xslt=$$
+ + offprint + + + festschrift + + + + biography + + + + + essay + + + drama + + + comic strip + + + fiction + + + humor, satire + + + letter + + + novel + + + short story + + + speech + + + + + + + biography + + + conference publication + + + drama + + + essay + + + fiction + + + folktale + + + history + + + humor, satire + + + memoir + + + poetry + + + rehearsal + + + reporting + + + sound + + + speech + + + + + + + art original + + + kit + + + art reproduction + + + diorama + + + filmstrip + + + legal article + + + picture + + + graphic + + + technical drawing + + + motion picture + + + chart + + + flash card + + + microscope slide + + + model + + + realia + + + slide + + + transparency + + + videorecording + + + toy + + + + + + + + + + + + + abcdef + - + + + + + + + + + + abvxyz + - + + + + + + + + + code + marccountry + + + + + + + + code + iso3166 + + + + + + + + text + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + :,;/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + monographic + continuing + + + + + + + ab + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + reformatted digital + + + digitized microfilm + + + digitized other analog + + + + + + + + + + + + + + + +
braille +
+ +
print +
+ +
electronic +
+ +
microfiche +
+ +
microfilm +
+
+ + +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + +
+ + + + access + + + preservation + + + replacement + + + + + +
chip cartridge +
+ +
computer optical disc cartridge +
+ +
magnetic disc +
+ +
magneto-optical disc +
+ +
optical disc +
+ +
remote +
+ +
tape cartridge +
+ +
tape cassette +
+ +
tape reel +
+ + +
celestial globe +
+ +
earth moon globe +
+ +
planetary or lunar globe +
+ +
terrestrial globe +
+ + +
kit +
+ + +
atlas +
+ +
diagram +
+ +
map +
+ +
model +
+ +
profile +
+ +
remote-sensing image +
+ +
section +
+ +
view +
+ + +
aperture card +
+ +
microfiche +
+ +
microfiche cassette +
+ +
microfilm cartridge +
+ +
microfilm cassette +
+ +
microfilm reel +
+ +
microopaque +
+ + +
film cartridge +
+ +
film cassette +
+ +
film reel +
+ + +
chart +
+ +
collage +
+ +
drawing +
+ +
flash card +
+ +
painting +
+ +
photomechanical print +
+ +
photonegative +
+ +
photoprint +
+ +
picture +
+ +
print +
+ +
technical drawing +
+ + +
notated music +
+ + +
filmslip +
+ +
filmstrip cartridge +
+ +
filmstrip roll +
+ +
other filmstrip type +
+ +
slide +
+ +
transparency +
+ +
remote-sensing image +
+ +
cylinder +
+ +
roll +
+ +
sound cartridge +
+ +
sound cassette +
+ +
sound disc +
+ +
sound-tape reel +
+ +
sound-track film +
+ +
wire recording +
+ + +
braille +
+ +
combination +
+ +
moon +
+ +
tactile, with no writing system +
+ + +
braille +
+ +
large print +
+ +
regular print +
+ +
text in looseleaf binder +
+ + +
videocartridge +
+ +
videocassette +
+ +
videodisc +
+ +
videoreel +
+ + + + + + + + + + abce + + + +
+ + + + + + + + + + ab + + + + + + + + agrt + + + + + + + ab + + + + + + + + + adolescent + + + adult + + + general + + + juvenile + + + preschool + + + specialized + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + defg + + + + + + + + + + + + marcgac + + + + + + iso3166 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ab + + + + + + + abx + + + + + + + ab + + + + + + + + + + + + + + + + + + + + + + + + + + + + ab + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">av</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">av</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + abcx3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aq + t + g + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">dg</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + c + t + dgn + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aqdc + t + gn + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">adfgklmorsv</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="marc:subfield[@code='a']"/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param 
name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + aq + t + g + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklmorsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">dg</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + c + t + dgn + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="specialSubfieldSelect"> + <xsl:with-param name="anyCodes">tfklsv</xsl:with-param> + <xsl:with-param name="axis">t</xsl:with-param> + <xsl:with-param name="afterCodes">g</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + aqdc + t + gn + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">adfgklmorsv</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + isbn + + + + + + + + + + isrc + + + + + + + + + + ismn + + + + + + + + + + sici + + + + ab + + + + + + + issn + + + + + + + + issn-l + + + + + + + + + + + + lccn + + + + + + + + + + issue number + matrix number + music plate + music publisher + videorecording identifier + + + + + + + + ba + ab + + + + + + + + + + + ab + + + + + + + + doi + hdl + uri + + + + + + + + + + + + + + + + + y3z + + + + + + + + + + + + + + + + + + + + + + + + + y3 + + + + + + + z + + + + + + + + + + + + + + + + y3 + + + + + + + z + + + + + + + + + + + + + + + + + + abe + + + + + + + + + u + + + + + + + + hijklmt + + + + + + + + + + abcd35 + + + + + + + abcde35 + + + + + + + + + + aacr2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + n + n + fgkdlmor + + + + + p + p + fgkdlmor + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + g + g + pst + + + + + p + p + fgkdlmor + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + cdn + + + + + + + + + + aq + + + + :,;/ + + + + + + + + + + acdeq + + + + + + constituent + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:value-of select="."/> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + code + marcgac + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + lcsh + lcshac + mesh + + nal + csh + rvm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + aq + + + + + + + + + + + + + + + + + + + + + + + + + + + + + cdnp + + + + + + + + + + + + + + + abcdeqnp + + + + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">adfhklor</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + + + abcd + + + + + + + + + + + + + + + + abcd + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + bc + + + + + + + + + + + + + + + + + + + + + + + + + + + yes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Arabic + Latin + Chinese, Japanese, Korean + Cyrillic + Hebrew + Greek + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + summary or subtitle + sung or spoken text + libretto + table of contents + accompanying material + translation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + summary or subtitle + sung or spoken text + libretto + table of contents + accompanying material + translation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + abcdefghijklmnopqrstuvwxyz + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .:,;/ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .:,;/] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Warning: string contains a character + that is out of range! Substituting "?". + 63 + + + + + + + + + + + + + + + + +
$$ WHERE name = 'mods33'; + +COMMIT; diff --git a/KCLS/sql/browse/combined/0025.kmain-806-separate_browse_entry_simple_heading_map.sql b/KCLS/sql/browse/combined/0025.kmain-806-separate_browse_entry_simple_heading_map.sql new file mode 100644 index 0000000000..ee79b557fd --- /dev/null +++ b/KCLS/sql/browse/combined/0025.kmain-806-separate_browse_entry_simple_heading_map.sql @@ -0,0 +1,115 @@ +-- Split metabib.browse_entry_simple_heading_map into 4 tables for author, series, subject, and title +-------------------------------------------------------------------------------------------------------- + +BEGIN; + +-- Create metabib.browse_author_entry_simple_heading_map +-- Table: metabib.browse_author_entry_simple_heading_map + +-- DROP TABLE metabib.browse_author_entry_simple_heading_map; + +CREATE TABLE metabib.browse_author_entry_simple_heading_map +( + id bigserial NOT NULL, + entry bigint, + simple_heading bigint +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_author_entry_simple_heading_map + OWNER TO evergreen; + +COMMIT; + + +--------------------------------------------------------------------------------------------------------- + +-- Create metabib.browse_series_entry_simple_heading_map +-- Table: metabib.browse_series_entry_simple_heading_map + +-- DROP TABLE metabib.browse_series_entry_simple_heading_map; + +BEGIN; + +CREATE TABLE metabib.browse_series_entry_simple_heading_map +( + id bigserial NOT NULL, + entry bigint, + simple_heading bigint +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_series_entry_simple_heading_map + OWNER TO evergreen; + +COMMIT; + + +--------------------------------------------------------------------------------------------------------- + +-- Create metabib.browse_subject_entry_simple_heading_map +-- Table: metabib.browse_subject_entry_simple_heading_map + +-- DROP TABLE metabib.browse_subject_entry_simple_heading_map; + +BEGIN; + +CREATE TABLE metabib.browse_subject_entry_simple_heading_map +( + id bigserial NOT NULL, + entry bigint, + simple_heading bigint +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_subject_entry_simple_heading_map + OWNER TO evergreen; + +COMMIT; + + +--------------------------------------------------------------------------------------------------------- + +-- Create metabib.browse_title_entry_simple_heading_map +-- Table: metabib.browse_title_entry_simple_heading_map + +-- DROP TABLE metabib.browse_title_entry_simple_heading_map; + +BEGIN; + +CREATE TABLE metabib.browse_title_entry_simple_heading_map +( + id bigserial NOT NULL, + entry bigint, + simple_heading bigint +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_title_entry_simple_heading_map + OWNER TO evergreen; + +COMMIT; + + +------------------------------------------------------------------------------------------------------- +-- Now that the new tables have been created, the following functions have been modified in the +-- combined browse scripts. +-- 003 -- +-- metabib.browse_author_authority_refs_pivot +-- metabib.browse_series_authority_refs_pivot +-- metabib.browse_subject_authority_refs_pivot +-- metabib.browse_title_authority_refs_pivot + +-- 007 -- +-- metabib.staged_browse +-- metabib.browse +-- authority.indexing_ingest_or_delete -- updates data when authority records changed +-- -- ongoing updates to the 4 new tables + +-- 001 -- +-- Removed the creation of the being split up. 
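+-- Note: 0025 above only creates the four class-specific simple_heading_map tables;
+-- per the closing comment, ongoing maintenance moves into the revised functions in
+-- scripts 003 and 007. No backfill is shown in this patch. As a sketch only --
+-- assuming the class-specific entry tables from 003.split_up_browse_into_tables.sql
+-- exist and reuse the ids of metabib.browse_entry -- the author map could be seeded
+-- from the combined map like this:
+--
+--   -- Hypothetical backfill, not part of this change set.
+--   INSERT INTO metabib.browse_author_entry_simple_heading_map (entry, simple_heading)
+--   SELECT beshm.entry, beshm.simple_heading
+--     FROM metabib.browse_entry_simple_heading_map beshm
+--     JOIN metabib.browse_author_entry mbae ON (mbae.id = beshm.entry);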
+------------------------------------------------------------------------------------------------------- \ No newline at end of file diff --git a/KCLS/sql/browse/combined/0026.kmain-762-public_replace_ampersand.sql b/KCLS/sql/browse/combined/0026.kmain-762-public_replace_ampersand.sql new file mode 100644 index 0000000000..4dd4a9c4a7 --- /dev/null +++ b/KCLS/sql/browse/combined/0026.kmain-762-public_replace_ampersand.sql @@ -0,0 +1,16 @@ +BEGIN; +-- Function: public.replace_ampersand(text) + +-- DROP FUNCTION public.replace_ampersand(text); + +CREATE OR REPLACE FUNCTION public.replace_ampersand(text) + RETURNS text AS +$BODY$ + SELECT REGEXP_REPLACE( $1, '&|&', 'and', 'g' ); +$BODY$ + LANGUAGE sql IMMUTABLE STRICT + COST 100; +ALTER FUNCTION public.replace_ampersand(text) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/003.split_up_browse_into_tables.sql b/KCLS/sql/browse/combined/003.split_up_browse_into_tables.sql new file mode 100644 index 0000000000..8a20f41629 --- /dev/null +++ b/KCLS/sql/browse/combined/003.split_up_browse_into_tables.sql @@ -0,0 +1,852 @@ +BEGIN; +-------------------AUTHOR-------------------- +-- STEP 1: Create entry table + +-- Table: metabib.browse_author_entry + +-- DROP TABLE metabib.browse_author_entry_def_map; + +-- DROP TABLE metabib.browse_author_entry; + +CREATE TABLE metabib.browse_author_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_author_entry + OWNER TO evergreen; + +-- Index: metabib.browse_author_entry_sort_value_idx + +-- DROP INDEX metabib.browse_author_entry_sort_value_idx; + +CREATE INDEX browse_author_entry_sort_value_idx + ON metabib.browse_author_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_author_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_author_entry_truncated_sort_value_idx; + +CREATE INDEX browse_author_entry_truncated_sort_value_idx + ON metabib.browse_author_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_author_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_author_entry_index_vector_idx; + +CREATE INDEX metabib_browse_author_entry_index_vector_idx + ON metabib.browse_author_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_author_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_author_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_author_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_author_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_author_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'author'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_author_entry +ADD CONSTRAINT browse_author_entry_pkey PRIMARY KEY (id); + +ALTER TABLE IF EXISTS metabib.browse_author_entry + ADD CONSTRAINT browse_author_entry_sort_value_value_key UNIQUE(sort_value); + +-- STEP 4: Create def_map table + +-- Table: 
metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_author_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_author_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_author_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_author_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_author_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_author_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_author_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_author_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_author_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_author_entry_def_map_def_idx; + +CREATE INDEX browse_author_entry_def_map_def_idx + ON metabib.browse_author_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_author_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_author_entry_def_map_entry_idx; + +CREATE INDEX browse_author_entry_def_map_entry_idx + ON metabib.browse_author_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_author_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_author_entry_def_map_source_idx; + +CREATE INDEX browse_author_entry_def_map_source_idx + ON metabib.browse_author_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_author_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'author'); + +-- Add a truncated_sort_value if one does not exist +UPDATE metabib.browse_author_entry +SET truncated_sort_value = substr(sort_value, 1, 2700) +WHERE truncated_sort_value IS NULL; + + +-----------------SERIES TITLES--------------------- +-- STEP 1: Create entry table + +-- Table: metabib.browse_series_entry + +-- DROP TABLE metabib.browse_series_entry_def_map; + +-- DROP TABLE metabib.browse_series_entry; + +CREATE TABLE metabib.browse_series_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_series_entry + OWNER TO evergreen; + +-- Index: metabib.browse_series_entry_sort_value_idx + +-- DROP INDEX metabib.browse_series_entry_sort_value_idx; + +CREATE INDEX browse_series_entry_sort_value_idx + ON metabib.browse_series_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_series_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_series_entry_truncated_sort_value_idx; + +CREATE INDEX browse_series_entry_truncated_sort_value_idx + ON metabib.browse_series_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_series_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_series_entry_index_vector_idx; + +CREATE INDEX metabib_browse_series_entry_index_vector_idx + ON 
metabib.browse_series_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_series_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_series_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_series_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_series_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_series_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'series'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_series_entry +ADD CONSTRAINT browse_series_entry_pkey PRIMARY KEY (id); + +ALTER TABLE IF EXISTS metabib.browse_series_entry + ADD CONSTRAINT browse_series_entry_sort_value_value_key UNIQUE(sort_value); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_series_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_series_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_series_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_series_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_series_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_series_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_series_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_series_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_series_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_series_entry_def_map_def_idx; + +CREATE INDEX browse_series_entry_def_map_def_idx + ON metabib.browse_series_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_series_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_series_entry_def_map_entry_idx; + +CREATE INDEX browse_series_entry_def_map_entry_idx + ON metabib.browse_series_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_series_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_series_entry_def_map_source_idx; + +CREATE INDEX browse_series_entry_def_map_source_idx + ON metabib.browse_series_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_series_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'series'); + +-- Add a truncated_sort_value if one does not exist +UPDATE metabib.browse_series_entry +SET truncated_sort_value = substr(sort_value, 1, 2700) +WHERE truncated_sort_value IS NULL; + + +--------------------------SUBJECT------------------------ +-- STEP 1: Create entry table + +-- Table: 
metabib.browse_subject_entry + +-- DROP TABLE metabib.browse_subject_entry_def_map; + +-- DROP TABLE metabib.browse_subject_entry; + +CREATE TABLE metabib.browse_subject_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_subject_entry + OWNER TO evergreen; + +-- Index: metabib.browse_subject_entry_sort_value_idx + +-- DROP INDEX metabib.browse_subject_entry_sort_value_idx; + +CREATE INDEX browse_subject_entry_sort_value_idx + ON metabib.browse_subject_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_subject_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_subject_entry_truncated_sort_value_idx; + +CREATE INDEX browse_subject_entry_truncated_sort_value_idx + ON metabib.browse_subject_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_subject_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_subject_entry_index_vector_idx; + +CREATE INDEX metabib_browse_subject_entry_index_vector_idx + ON metabib.browse_subject_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_subject_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_subject_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_subject_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_subject_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_subject_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'subject'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_subject_entry +ADD CONSTRAINT browse_subject_entry_pkey PRIMARY KEY (id); + +ALTER TABLE IF EXISTS metabib.browse_subject_entry + ADD CONSTRAINT browse_subject_entry_sort_value_value_key UNIQUE(sort_value); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_subject_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_subject_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_subject_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_subject_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_subject_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_subject_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_subject_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_subject_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_subject_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_subject_entry_def_map_def_idx; + +CREATE INDEX 
browse_subject_entry_def_map_def_idx + ON metabib.browse_subject_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_subject_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_subject_entry_def_map_entry_idx; + +CREATE INDEX browse_subject_entry_def_map_entry_idx + ON metabib.browse_subject_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_subject_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_subject_entry_def_map_source_idx; + +CREATE INDEX browse_subject_entry_def_map_source_idx + ON metabib.browse_subject_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_subject_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'subject'); + +-- Add a truncated_sort_value if one does not exist +UPDATE metabib.browse_subject_entry +SET truncated_sort_value = substr(sort_value, 1, 2700) +WHERE truncated_sort_value IS NULL; + + +---------------------TITLE------------------------ +-- STEP 1: Create entry table + +-- Table: metabib.browse_title_entry + +-- DROP TABLE metabib.browse_title_entry_def_map; + +-- DROP TABLE metabib.browse_title_entry; + +CREATE TABLE metabib.browse_title_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_title_entry + OWNER TO evergreen; + +-- Index: metabib.browse_title_entry_sort_value_idx + +-- DROP INDEX metabib.browse_title_entry_sort_value_idx; + +CREATE INDEX browse_title_entry_sort_value_idx + ON metabib.browse_title_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_title_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_title_entry_truncated_sort_value_idx; + +CREATE INDEX browse_title_entry_truncated_sort_value_idx + ON metabib.browse_title_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_title_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_title_entry_index_vector_idx; + +CREATE INDEX metabib_browse_title_entry_index_vector_idx + ON metabib.browse_title_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_title_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_title_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_title_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_title_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_title_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'title'); + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_title_entry +ADD CONSTRAINT browse_title_entry_pkey PRIMARY KEY (id); + +ALTER TABLE IF EXISTS metabib.browse_title_entry + ADD CONSTRAINT browse_title_entry_sort_value_value_key UNIQUE(sort_value); + +-- STEP 4: Create def_map table + +-- Table: 
metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_title_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_title_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_title_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_title_entry_def_map_def_fkey FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_title_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_title_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_title_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_title_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_title_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_title_entry_def_map_def_idx; + +CREATE INDEX browse_title_entry_def_map_def_idx + ON metabib.browse_title_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_title_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_title_entry_def_map_entry_idx; + +CREATE INDEX browse_title_entry_def_map_entry_idx + ON metabib.browse_title_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_title_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_title_entry_def_map_source_idx; + +CREATE INDEX browse_title_entry_def_map_source_idx + ON metabib.browse_title_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_title_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.field_class = 'title'); + + +-- Add a truncated_sort_value if one does not exist +UPDATE metabib.browse_title_entry +SET truncated_sort_value = substr(sort_value, 1, 2700) +WHERE truncated_sort_value IS NULL; + + +--======FUNCTIONS======-- +-- browse_authority_refs_pivot -------------------------------------------------------------------- +--- FAIL DROP FUNCTION metabib.browse_authority_refs_pivot(integer[], text); + +-----AUTHOR---- +CREATE OR REPLACE FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbae.id + FROM metabib.browse_author_entry mbae + JOIN metabib.browse_author_entry_simple_heading_map mbaeshm ON ( mbaeshm.entry = mbae.id ) + JOIN authority.simple_heading ash ON ( mbaeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbae.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbae.sort_value), public.replace_ampersand(mbae.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + +-------TITLE------ +CREATE OR REPLACE FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver2.0 updated with 
kmain-762: wrapped values in public.replace_ampersand function + SELECT mbte.id + FROM metabib.browse_title_entry mbte + JOIN metabib.browse_title_entry_simple_heading_map mbteshm ON ( mbteshm.entry = mbte.id ) + JOIN authority.simple_heading ash ON ( mbteshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbte.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbte.sort_value), public.replace_ampersand(mbte.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + +------SUBJECT------ +CREATE OR REPLACE FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbse.id + FROM metabib.browse_subject_entry mbse + JOIN metabib.browse_subject_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbse.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbse.sort_value), public.replace_ampersand(mbse.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + +-----SERIES TITLE------ +CREATE OR REPLACE FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbse.id + FROM metabib.browse_series_entry mbse + JOIN metabib.browse_series_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbse.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbse.sort_value), public.replace_ampersand(mbse.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + + +-- metabib.browse_bib_pivot(integer[], text) ------------------------------------------------------ +-- DROP FUNCTION metabib.browse_bib_pivot(integer[], text); + +------AUTHOR------ +CREATE OR REPLACE FUNCTION metabib.browse_author_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbe.id + FROM metabib.browse_author_entry mbe + JOIN metabib.browse_author_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_bib_pivot(integer[], text) + OWNER TO evergreen; + 
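+-- For reference, the per-class pivot functions above all share one calling
+-- convention: an integer[] of config.metabib_field ids and a raw browse term
+-- (the term is naco-normalized inside the function). A minimal illustration --
+-- the field ids and search term below are made-up example values, not taken
+-- from this patch; real ids come from config.metabib_field rows whose
+-- field_class = 'author':
+--
+--   SELECT metabib.browse_author_bib_pivot(ARRAY[6,7,8], 'rowling j k');
+--   SELECT metabib.browse_author_authority_refs_pivot(ARRAY[6,7,8], 'rowling j k');
+--
+-- metabib.browse_author_pivot, defined below, runs both and keeps whichever
+-- candidate entry sorts first.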
+-----TITLE----- +CREATE OR REPLACE FUNCTION metabib.browse_title_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_title_entry mbe + JOIN metabib.browse_title_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_bib_pivot(integer[], text) + OWNER TO evergreen; + +-----SUBJECT----- +CREATE OR REPLACE FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbe.id + FROM metabib.browse_subject_entry mbe + JOIN metabib.browse_subject_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + OWNER TO evergreen; + +-------SERIES TITLE------ +CREATE OR REPLACE FUNCTION metabib.browse_series_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbe.id + FROM metabib.browse_series_entry mbe + JOIN metabib.browse_series_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_bib_pivot(integer[], text) + OWNER TO evergreen; + + +-- metabib.browse_pivot -------------------------------------------------------------------- +-- DROP FUNCTION metabib.browse_pivot(integer[], text); + +-----AUTHOR----- +CREATE OR REPLACE FUNCTION metabib.browse_author_pivot(search_field integer[], browse_term text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT id FROM metabib.browse_author_entry + WHERE id IN ( + metabib.browse_author_bib_pivot(search_field, browse_term), + metabib.browse_author_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_pivot(integer[], text) + OWNER TO evergreen; + +-----TITLE----- +CREATE OR REPLACE FUNCTION metabib.browse_title_pivot(search_field integer[], browse_term text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT id FROM metabib.browse_title_entry + WHERE id IN ( + metabib.browse_title_bib_pivot(search_field, browse_term), + metabib.browse_title_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_pivot(integer[], text) + OWNER TO evergreen; + +-----SUBJECT----- +CREATE OR REPLACE FUNCTION metabib.browse_subject_pivot(search_field integer[], browse_term text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT id FROM metabib.browse_subject_entry + WHERE id IN ( + metabib.browse_subject_bib_pivot(search_field, browse_term), + 
metabib.browse_subject_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_pivot(integer[], text) + OWNER TO evergreen; + +-----SERIES TITLE----- +CREATE OR REPLACE FUNCTION metabib.browse_series_pivot(search_field integer[], browse_term text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT id FROM metabib.browse_series_entry + WHERE id IN ( + metabib.browse_series_bib_pivot(search_field, browse_term), + metabib.browse_series_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_pivot(integer[], text) + OWNER TO evergreen; + + +COMMIT; diff --git a/KCLS/sql/browse/combined/004.naco_normalize_keep_decimal.sql b/KCLS/sql/browse/combined/004.naco_normalize_keep_decimal.sql new file mode 100644 index 0000000000..4f735afa2c --- /dev/null +++ b/KCLS/sql/browse/combined/004.naco_normalize_keep_decimal.sql @@ -0,0 +1,61 @@ +BEGIN; +-- KMAIN-1271 Specialized normalize to retain decimals. +-- Function: public.naco_normalize_keep_decimal(text, text) + +-- DROP FUNCTION public.naco_normalize_keep_decimal(text, text); + +CREATE OR REPLACE FUNCTION public.naco_normalize_keep_decimal(text, text) + RETURNS text AS +$BODY$ use strict; + use Unicode::Normalize; + use Encode; + + my $str = decode_utf8(shift); + my $sf = shift; + + # Altered version of the naco_normalize function + + $str = uc $str; + + # remove non-filing strings + $str =~ s/\x{0098}.*?\x{009C}//g; + + $str = NFKD($str); + + # additional substitutions - 3.6. + $str =~ s/\x{00C6}/AE/g; + $str =~ s/\x{00DE}/TH/g; + $str =~ s/\x{0152}/OE/g; + $str =~ tr/\x{0110}\x{00D0}\x{00D8}\x{0141}\x{2113}\x{02BB}\x{02BC}]['/DDOLl/d; + # transformations based on Unicode category codes + $str =~ s/[\p{Cc}\p{Cf}\p{Co}\p{Cs}\p{Lm}\p{Mc}\p{Me}\p{Mn}]//g; + + if ($sf) {$str =~ s/,/\x{009F}/;} + + $str =~ s/\'//g; + $str =~ s/\,//g; + $str =~ s/\'//g; + # since we`ve stripped out the control characters, we can now + # use a few as placeholders temporarily + $str =~ tr/+&@\x{266D}\x{266F}#/\x01\x02\x03\x04\x05\x06/; + + $str =~ tr/\x01\x02\x03\x04\x05\x06\x07/+&@\x{266D}\x{266F}#,/; + + # intentionally skipping step 8 of the NACO algorithm; if the string + # gets normalized away, that`s fine. 
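+    # (When a second argument was passed, the first comma was swapped to
+    # \x{009F} above so it survives the comma stripping; it is swapped back
+    # just before the lowercased string is returned.)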
+ + # leading and trailing spaces + $str =~ s/\s+/ /g; + $str =~ s/^\s+//; + $str =~ s/\s+$//g; + + $str =~ s/\x{009F}/,/; #Put comma back + + return lc $str; +$BODY$ + LANGUAGE plperlu IMMUTABLE STRICT + COST 100; +ALTER FUNCTION public.naco_normalize_keep_decimal(text, text) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/005.call_number_browse.sql b/KCLS/sql/browse/combined/005.call_number_browse.sql new file mode 100644 index 0000000000..1d1fb19521 --- /dev/null +++ b/KCLS/sql/browse/combined/005.call_number_browse.sql @@ -0,0 +1,278 @@ +BEGIN; + +-- Table: metabib.browse_call_number_entry + +-- DROP TABLE metabib.browse_call_number_entry_def_map; + +-- DROP TABLE metabib.browse_call_number_entry; + +CREATE TABLE metabib.browse_call_number_entry +( + id bigserial NOT NULL, + value text, + index_vector tsvector, + sort_value text NOT NULL, + truncated_sort_value text +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_call_number_entry + OWNER TO evergreen; + +-- Index: metabib.browse_call_number_entry_sort_value_idx + +-- DROP INDEX metabib.browse_call_number_entry_sort_value_idx; + +CREATE INDEX browse_call_number_entry_sort_value_idx + ON metabib.browse_call_number_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default" ); + +-- Index: metabib.browse_call_number_entry_truncated_sort_value_idx + +-- DROP INDEX metabib.browse_call_number_entry_truncated_sort_value_idx; + +CREATE INDEX browse_call_number_entry_truncated_sort_value_idx + ON metabib.browse_call_number_entry + USING btree + (truncated_sort_value COLLATE pg_catalog."default" ); + +-- Index: metabib.metabib_browse_call_number_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_browse_call_number_entry_index_vector_idx; + +CREATE INDEX metabib_browse_call_number_entry_index_vector_idx + ON metabib.browse_call_number_entry + USING gin + (index_vector ); + + +-- Trigger: metabib_browse_call_number_entry_fti_trigger on metabib.browse_entry + +-- DROP TRIGGER metabib_browse_call_number_entry_fti_trigger ON metabib.browse_entry; + +CREATE TRIGGER metabib_browse_call_number_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.browse_call_number_entry + FOR EACH ROW + EXECUTE PROCEDURE public.oils_tsearch2('keyword'); + +-- STEP 2: Populate entry table + +INSERT INTO metabib.browse_call_number_entry +(id, value, index_vector, sort_value, truncated_sort_value) +(SELECT DISTINCT mbe.id, mbe.value, mbe.index_vector, mbe.sort_value, mbe.truncated_sort_value +FROM metabib.browse_entry mbe +JOIN metabib.browse_entry_def_map mbedm ON (mbe.id = mbedm.entry) +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.id = 25); -- identifier, bibcn + +-- STEP 3: Add constraint to table + +ALTER TABLE IF EXISTS metabib.browse_call_number_entry +ADD CONSTRAINT browse_call_number_entry_pkey PRIMARY KEY (id); + +ALTER TABLE IF EXISTS metabib.browse_call_number_entry + ADD CONSTRAINT browse_call_number_entry_sort_value_value_key UNIQUE(sort_value); + +-- STEP 4: Create def_map table + +-- Table: metabib.browse_entry_def_map + +CREATE TABLE metabib.browse_call_number_entry_def_map +( + id bigserial NOT NULL, + entry bigint, + def integer, + source bigint, + authority bigint, + CONSTRAINT browse_call_number_entry_def_map_pkey PRIMARY KEY (id ), + CONSTRAINT browse_call_number_entry_def_map_authority_fkey FOREIGN KEY (authority) + REFERENCES authority.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE SET NULL, + CONSTRAINT browse_call_number_entry_def_map_def_fkey 
FOREIGN KEY (def) + REFERENCES config.metabib_field (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_call_number_entry_def_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_call_number_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT browse_call_number_entry_def_map_source_fkey FOREIGN KEY (source) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.browse_call_number_entry_def_map + OWNER TO evergreen; + +-- Index: metabib.browse_call_number_entry_def_map_def_idx + +-- DROP INDEX metabib.browse_call_number_entry_def_map_def_idx; + +CREATE INDEX browse_call_number_entry_def_map_def_idx + ON metabib.browse_call_number_entry_def_map + USING btree + (def ); + +-- Index: metabib.browse_call_number_entry_def_map_entry_idx + +-- DROP INDEX metabib.browse_call_number_entry_def_map_entry_idx; + +CREATE INDEX browse_call_number_entry_def_map_entry_idx + ON metabib.browse_call_number_entry_def_map + USING btree + (entry ); + +-- Index: metabib.browse_call_number_entry_def_map_source_idx + +-- DROP INDEX metabib.browse_call_number_entry_def_map_source_idx; + +CREATE INDEX browse_call_number_entry_def_map_source_idx + ON metabib.browse_call_number_entry_def_map + USING btree + (source ); + +-- STEP 5: Populate def_map table + +INSERT INTO metabib.browse_call_number_entry_def_map +(id, entry, def, source, authority) +(SELECT mbedm.id, mbedm.entry, mbedm.def, mbedm.source, mbedm.authority +FROM metabib.browse_entry_def_map mbedm +JOIN config.metabib_field cmf ON (mbedm.def = cmf.id) +WHERE cmf.id = 25); -- identifier, bibcn + + +-- STEP 6: metabib.call_number_field_entry + +-- DROP TABLE metabib.call_number_field_entry; + +CREATE TABLE metabib.call_number_field_entry +( + id bigserial NOT NULL, + source bigint NOT NULL, + field integer NOT NULL, + value text NOT NULL, + index_vector tsvector NOT NULL, + CONSTRAINT call_number_field_entry_pkey PRIMARY KEY (id ) +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.call_number_field_entry + OWNER TO evergreen; +GRANT ALL ON TABLE metabib.call_number_field_entry TO evergreen; +GRANT ALL ON TABLE metabib.call_number_field_entry TO bbonner; +GRANT SELECT ON TABLE metabib.call_number_field_entry TO biblio; + +-- Index: metabib.metabib_call_number_field_entry_index_vector_idx + +-- DROP INDEX metabib.metabib_call_number_field_entry_index_vector_idx; + +CREATE INDEX metabib_call_number_field_entry_index_vector_idx + ON metabib.call_number_field_entry + USING gist + (index_vector ); + +-- Index: metabib.metabib_call_number_field_entry_source_idx + +-- DROP INDEX metabib.metabib_call_number_field_entry_source_idx; + +CREATE INDEX metabib_call_number_field_entry_source_idx + ON metabib.call_number_field_entry + USING btree + (source ); + +-- Index: metabib.metabib_call_number_field_entry_value_idx + +-- DROP INDEX metabib.metabib_call_number_field_entry_value_idx; + +CREATE INDEX metabib_call_number_field_entry_value_idx + ON metabib.call_number_field_entry + USING btree + ("substring"(value, 1, 1024) COLLATE pg_catalog."default" ) + WHERE index_vector = ''::tsvector; + + +-- Trigger: metabib_call_number_field_entry_fti_trigger on metabib.call_number_field_entry + +-- DROP TRIGGER metabib_call_number_field_entry_fti_trigger ON metabib.call_number_field_entry; + +CREATE TRIGGER metabib_call_number_field_entry_fti_trigger + BEFORE INSERT OR UPDATE + ON metabib.call_number_field_entry + FOR EACH ROW + EXECUTE 
PROCEDURE public.oils_tsearch2('call_number'); + +-- Trigger: normalize_field_entry on metabib.call_number_field_entry + +-- DROP TRIGGER normalize_field_entry ON metabib.call_number_field_entry; + +CREATE TRIGGER normalize_field_entry + AFTER INSERT OR UPDATE + ON metabib.call_number_field_entry + FOR EACH ROW + EXECUTE PROCEDURE metabib.normalized_field_entry_view(); +ALTER TABLE metabib.call_number_field_entry DISABLE TRIGGER normalize_field_entry; + + +-- STEP 7: set call_number as browsable + +UPDATE config.metabib_field + SET browse_field=true + WHERE field_class = 'identifier' AND name = 'bibcn'; + + +-- browse_authority_refs_pivot -------------------------------------------------------------------- +-- call_number +CREATE OR REPLACE FUNCTION metabib.browse_call_number_authority_refs_pivot(text) + RETURNS bigint AS +$BODY$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or browse term + SELECT mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE mbe.sort_value >= public.naco_normalize_keep_decimal($1, '') + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_call_number_authority_refs_pivot(text) + OWNER TO evergreen; + + +-- metabib.browse_bib_pivot(integer[], text) ------------------------------------------------------ +-- call_number +CREATE OR REPLACE FUNCTION metabib.browse_call_number_bib_pivot(text) + RETURNS bigint AS +$BODY$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or browse term + SELECT mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE mbe.sort_value >= public.naco_normalize_keep_decimal($1, '') + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_call_number_bib_pivot(text) + OWNER TO evergreen; + + +-- browse_pivot -------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION metabib.browse_call_number_pivot(browse_term text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT id FROM metabib.browse_call_number_entry + WHERE id IN ( + metabib.browse_call_number_bib_pivot(browse_term), + metabib.browse_call_number_authority_refs_pivot(browse_term) + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_call_number_pivot(text) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/006.data-type-etc.sql b/KCLS/sql/browse/combined/006.data-type-etc.sql new file mode 100644 index 0000000000..206a680477 --- /dev/null +++ b/KCLS/sql/browse/combined/006.data-type-etc.sql @@ -0,0 +1,90 @@ +BEGIN; + +--TYPE: metabib.flat_browse_entry_appearance +CREATE TYPE metabib.flat_browse_entry_appearance AS ( + browse_entry BIGINT, + value TEXT, + fields TEXT, + authorities TEXT, + sources INT, -- visible ones, that is + row_number INT, -- internal use, sort of + accurate BOOL, -- Count in sources field is accurate? Not + -- if we had more than a browse superpage + -- of records to look at. + pivot_point BIGINT, + sees TEXT, + asources INT, + aaccurate TEXT +); + + +--DATA: config.org_unit_setting_type +INSERT INTO config.org_unit_setting_type ( + name, label, grp, description, datatype +) VALUES ( + 'opac.browse.pager_shortcuts', + 'Paging shortcut links for OPAC Browse', + 'opac', + 'The characters in this string, in order, will be used as shortcut links for quick paging in the OPAC browse interface. 
Any sequence surrounded by asterisks will be taken as a whole label, not split into individual labels at the character level, but only the first character will serve as the basis of the search.', + 'string' +); + + +--DATA: authority.control_set_bib_field_metabib_field_map +-- Map between authority controlled bib fields and stock indexing metabib fields +INSERT INTO authority.control_set_bib_field_metabib_field_map (bib_field, metabib_field) + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '100' AND m.name = 'personal' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '110' AND m.name = 'corporate' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '111' AND m.name = 'conference' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '130' AND m.name = 'uniform' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '148' AND m.name = 'temporal' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '150' AND m.name = 'topic' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '151' AND m.name = 'geographic' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '155' AND m.name = 'genre' -- Just in case... +; + + +--DATA: authority.control_set_authority_field +-- No 4XX inter-authority linking +UPDATE authority.control_set_authority_field SET linking_subfield = NULL; +UPDATE authority.control_set_authority_field SET linking_subfield = '0' WHERE tag LIKE ANY (ARRAY['5%','7%']); + + +COMMIT; diff --git a/KCLS/sql/browse/combined/007.functions.sql b/KCLS/sql/browse/combined/007.functions.sql new file mode 100644 index 0000000000..9641aae208 --- /dev/null +++ b/KCLS/sql/browse/combined/007.functions.sql @@ -0,0 +1,1791 @@ +BEGIN; + +---------authority.calculate_authority_linking------------ +-- Given an authority record's ID, control set ID (if known), and marc::XML, +-- return all links to other authority records in the form of rows that +-- can be inserted into authority.authority_linking. 
+CREATE OR REPLACE FUNCTION authority.calculate_authority_linking(
+    rec_id BIGINT, rec_control_set INT, rec_marc_xml XML
+) RETURNS SETOF authority.authority_linking AS $func$
+DECLARE
+    acsaf   authority.control_set_authority_field%ROWTYPE;
+    link    TEXT;
+    aal     authority.authority_linking%ROWTYPE;
+BEGIN
+    -- ver s
+    IF rec_control_set IS NULL THEN
+        -- No control_set on record? Guess at one
+        SELECT control_set INTO rec_control_set
+        FROM authority.control_set_authority_field
+        WHERE tag IN (
+            SELECT UNNEST(
+                XPATH('//*[starts-with(@tag,"1")]/@tag',rec_marc_xml)::TEXT[]
+            )
+        ) LIMIT 1;
+
+        IF NOT FOUND THEN
+            RAISE WARNING 'Could not even guess at control set for authority record %', rec_id;
+            RETURN;
+        END IF;
+    END IF;
+
+    aal.source := rec_id;
+
+    FOR acsaf IN
+        SELECT * FROM authority.control_set_authority_field
+        WHERE control_set = rec_control_set
+            AND linking_subfield IS NOT NULL
+            AND main_entry IS NOT NULL
+    LOOP
+        -- Loop over the trailing-number contents of all linking subfields
+        FOR link IN
+            SELECT SUBSTRING( x::TEXT, '\d+$' )
+            FROM UNNEST(
+                XPATH(
+                    '//*[@tag="'
+                        || acsaf.tag
+                        || '"]/*[@code="'
+                        || acsaf.linking_subfield
+                        || '"]/text()',
+                    rec_marc_xml
+                )
+            ) x
+        LOOP
+
+            -- Ignore links that are null, malformed, circular, or point to
+            -- non-existent authority records.
+            IF link IS NOT NULL AND link::BIGINT <> rec_id THEN
+                PERFORM * FROM authority.record_entry WHERE id = link::BIGINT;
+                IF FOUND THEN
+                    aal.target := link::BIGINT;
+                    aal.field := acsaf.id;
+                    RETURN NEXT aal;
+                END IF;
+            END IF;
+        END LOOP;
+    END LOOP;
+END;
+$func$ LANGUAGE PLPGSQL;
+
+-- KMAIN-1521
+CREATE OR REPLACE FUNCTION authority.heading_changed(old_marc text, new_marc text)
+  RETURNS boolean AS
+$BODY$
+
+DECLARE
+    old_heading text;
+    new_heading text;
+BEGIN
+    -- Grab the 1XX field of each authority marc.
+    old_heading := substring(old_marc from '<datafield[^>]*tag="1[0-9][0-9]".*?</datafield>');
+    new_heading := substring(new_marc from '<datafield[^>]*tag="1[0-9][0-9]".*?</datafield>');
+
+    IF old_heading = new_heading THEN
+        RETURN FALSE;
+    ELSE
+        RETURN TRUE;
+    END IF;
+
+END $BODY$
+  LANGUAGE plpgsql STABLE
+  COST 100;
+ALTER FUNCTION authority.heading_changed(text, text)
+  OWNER TO evergreen;
+
+-- KMAIN-1312
+CREATE OR REPLACE FUNCTION metabib.set_export_date(bib_id bigint, input_date date)
+  RETURNS void AS
+$BODY$
+BEGIN
+    PERFORM * FROM metabib.bib_export_data WHERE bib = bib_id;
+    IF FOUND THEN
+        UPDATE metabib.bib_export_data SET export_date = input_date
+            WHERE bib = bib_id;
+    ELSE
+        INSERT INTO metabib.bib_export_data (bib, export_date)
+            VALUES (bib_id, input_date);
+    END IF;
+END;
+$BODY$
+  LANGUAGE plpgsql VOLATILE
+  COST 100;
+ALTER FUNCTION metabib.set_export_date(bigint, date)
+  OWNER TO evergreen;
+
+-- KMAIN-1324
+CREATE OR REPLACE FUNCTION metabib.set_import_date(bib_id bigint, input_date date)
+  RETURNS void AS
+$BODY$
+BEGIN
+    PERFORM * FROM metabib.bib_export_data WHERE bib = bib_id;
+    IF FOUND THEN
+        UPDATE metabib.bib_export_data SET import_date = input_date
+            WHERE bib = bib_id;
+    ELSE
+        INSERT INTO metabib.bib_export_data (bib, import_date)
+            VALUES (bib_id, input_date);
+    END IF;
+END;
+$BODY$
+  LANGUAGE plpgsql VOLATILE
+  COST 100;
+ALTER FUNCTION metabib.set_import_date(bigint, date)
+  OWNER TO evergreen;
+
+-- KMAIN-821
+-- Add a function to replace authority.simple_heading_set(text) that also gives back the original marc field.
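    -- A minimal illustration (record id 123 is a made-up example): unlike
    -- authority.simple_heading_set(), each returned row also carries the
    -- un-normalized original_text of the heading field:
    --
    --   SELECT value, sort_value, original_text
    --     FROM authority.simple_heading_plus_set(
    --              (SELECT marc FROM authority.record_entry WHERE id = 123));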
+-- Function: authority.simple_heading_set(text)
+
+-- DROP FUNCTION authority.simple_heading_set(text);
+
+CREATE OR REPLACE FUNCTION authority.simple_heading_plus_set(marcxml text)
+  RETURNS SETOF authority.simple_heading_plus AS
+$BODY$
+DECLARE
+    res             authority.simple_heading_plus%ROWTYPE;
+    acsaf           authority.control_set_authority_field%ROWTYPE;
+    tag_used        TEXT;
+    nfi_used        TEXT;
+    sf              TEXT;
+    cset            INT;
+    heading_text    TEXT;
+    original_text   TEXT;
+    joiner_text     TEXT;
+    sort_text       TEXT;
+    tmp_text        TEXT;
+    tmp_xml         TEXT;
+    first_sf        BOOL;
+    auth_id         INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT;
+BEGIN
+
+    SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id;
+
+    IF cset IS NULL THEN
+        SELECT control_set INTO cset
+        FROM authority.control_set_authority_field
+        WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[]))
+        LIMIT 1;
+    END IF;
+
+    res.record := auth_id;
+
+    FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset LOOP
+
+        res.atag := acsaf.id;
+        tag_used := acsaf.tag;
+        nfi_used := acsaf.nfi;
+        joiner_text := COALESCE(acsaf.joiner, ' ');
+
+        FOR tmp_xml IN SELECT UNNEST(XPATH('//*[@tag="'||tag_used||'"]', marcxml::XML)) LOOP
+
+            heading_text := public.naco_normalize(COALESCE(
+                oils_xpath_string('//subfield[contains("'||acsaf.sf_list||'",@code)]', tmp_xml::TEXT, joiner_text),
+                ''));
+
+            original_text = COALESCE(
+                oils_xpath_string('//subfield[contains("'||acsaf.display_sf_list||'",@code)]', tmp_xml::TEXT),
+                '');
+
+            original_text = REGEXP_REPLACE(original_text, '^\s+|\s+$', '','g');
+
+            IF nfi_used IS NOT NULL THEN
+
+                sort_text := SUBSTRING(
+                    heading_text FROM
+                    COALESCE(
+                        NULLIF(
+                            REGEXP_REPLACE(
+                                oils_xpath_string('./@ind'||nfi_used, tmp_xml::TEXT),
+                                $$\D+$$,
+                                '',
+                                'g'
+                            ),
+                            ''
+                        )::INT,
+                        0
+                    ) + 1
+                );
+
+            ELSE
+                sort_text := heading_text;
+            END IF;
+
+            IF heading_text IS NOT NULL AND heading_text <> '' THEN
+                res.value := heading_text;
+                res.sort_value := public.naco_normalize(sort_text);
+                res.index_vector = to_tsvector('keyword'::regconfig, res.sort_value);
+                res.original_text := original_text;
+                RETURN NEXT res;
+            END IF;
+
+        END LOOP;
+
+    END LOOP;
+
+    RETURN;
+END;
+$BODY$
+  LANGUAGE plpgsql IMMUTABLE
+  COST 100
+  ROWS 1000;
+ALTER FUNCTION authority.simple_heading_plus_set(text)
+  OWNER TO evergreen;
+
+
+-------------authority.indexing_ingest_or_delete----------------
+-- AFTER UPDATE OR INSERT trigger for authority.record_entry
+-- Function: authority.indexing_ingest_or_delete()
+
+-- DROP FUNCTION authority.indexing_ingest_or_delete();
+
+CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete()
+  RETURNS trigger AS
+$BODY$
+DECLARE
+    ashs        authority.simple_heading%ROWTYPE;
+    mbe_row     metabib.browse_entry%ROWTYPE;
+    mbe_id      BIGINT;
+    ash_id      BIGINT;
+    search_class text;
+    field_id    integer;
+    are_row     authority.record_entry%ROWTYPE;
+    bre_row     biblio.record_entry%ROWTYPE;
+BEGIN
+    --ver 2.1 KMAIN-1119
+    IF NEW.deleted IS TRUE THEN -- If this authority is deleted
+        -- Remove the actual linking subfields present in
+        -- marc bib records that are controlled by this one
+        FOR bre_row IN SELECT * FROM biblio.record_entry bre
+            INNER JOIN authority.bib_linking abl
+                ON bre.id = abl.bib AND abl.authority = NEW.id LOOP
+
+            UPDATE biblio.record_entry
+                SET marc = (SELECT regexp_replace(bre_row.marc,E'<subfield[^>]*?code="0">\\([A-Z]+\\)' || NEW.id || '</subfield>','','g'))
+                WHERE id = bre_row.id;
+
+        END LOOP;
+        DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible
+        DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records
+        DELETE FROM authority.simple_heading WHERE record = NEW.id;
+        -- Remove the actual linking subfields present in
+        -- authority records that target this one
+        FOR are_row IN SELECT * FROM authority.record_entry auth
+            INNER JOIN authority.authority_linking aal
+                ON auth.id = aal.source AND aal.target = NEW.id LOOP
+
+            UPDATE authority.record_entry
+                SET marc = (SELECT regexp_replace(are_row.marc,E'<subfield[^>]*?code="0">\\([A-Z]+\\)' || NEW.id || '</subfield>','','g'))
+                WHERE id = are_row.id;
+
+        END LOOP;
+        DELETE FROM authority.authority_linking
+            WHERE source = NEW.id OR target = NEW.id;
+
+        RETURN NEW; -- and we're done
+    END IF;
+
+    IF TG_OP = 'UPDATE' THEN -- re-ingest?
+        PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled;
+
+        IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change
+            RETURN NEW;
+        END IF;
+
+        -- KMAIN-1521
+        -- Did the authority heading change? No need to propagate to bibs if the heading did not change
+        IF authority.heading_changed(OLD.marc, NEW.marc) THEN
+            -- Propagate these updates to any linked bib records
+            PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id;
+        END IF;
+
+        DELETE FROM authority.simple_heading WHERE record = NEW.id;
+        DELETE FROM authority.authority_linking WHERE source = NEW.id;
+    END IF;
+
+    INSERT INTO authority.authority_linking (source, target, field)
+        SELECT source, target, field FROM authority.calculate_authority_linking(
+            NEW.id, NEW.control_set, NEW.marc::XML
+        );
+
+    FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP
+
+        -- Get the search_class
+        SELECT INTO search_class, field_id cmf.field_class, cmf.id
+            FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr
+            JOIN config.metabib_field AS cmf
+                ON acsafmfmr.metabib_field = cmf.id
+            WHERE acsafmfmr.authority_field = ashs.atag;
+
+        INSERT INTO authority.simple_heading (record,atag,value,sort_value)
+            VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value);
+
+        ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS);
+
+        -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title)
+        CASE search_class
+            WHEN 'author' THEN
+                SELECT INTO mbe_row * FROM metabib.browse_author_entry
+                    WHERE sort_value = ashs.sort_value
+                    ORDER BY id;
+
+                IF FOUND THEN
+                    mbe_id := mbe_row.id;
+                ELSE
+                    INSERT INTO metabib.browse_author_entry
+                        ( value, sort_value, truncated_sort_value ) VALUES
+                        ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) );
+
+                    mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS);
+                END IF;
+
+                INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id);
+
+            WHEN 'series' THEN
+                SELECT INTO mbe_row * FROM metabib.browse_series_entry
+                    WHERE sort_value = ashs.sort_value
+                    ORDER BY id;
+
+                IF FOUND THEN
+                    mbe_id := mbe_row.id;
+                ELSE
+                    INSERT INTO metabib.browse_series_entry
+                        ( value, sort_value, truncated_sort_value ) VALUES
+                        ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) );
+
+                    mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS);
+                END IF;
+
+                INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id);
+
+            WHEN 'subject'
THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; + + +------------authority.normalize_heading------------- +CREATE OR REPLACE FUNCTION authority.normalize_heading( marcxml TEXT, no_thesaurus BOOL ) RETURNS TEXT AS $func$ +DECLARE + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + sf_node TEXT; + tag_node TEXT; + thes_code TEXT; + cset INT; + heading_text TEXT; + tmp_text TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + --ver1.0 + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + thes_code := vandelay.marc21_extract_fixed_field(marcxml,'Subj'); + IF thes_code IS NULL THEN + thes_code := '|'; + ELSIF thes_code = 'z' THEN + thes_code := COALESCE( oils_xpath_string('//*[@tag="040"]/*[@code="f"][1]', marcxml), '' ); + END IF; + + heading_text := ''; + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset AND main_entry IS NULL LOOP + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + first_sf := TRUE; + + FOR tag_node IN SELECT unnest(oils_xpath('//*[@tag="'||tag_used||'"]',marcxml)) LOOP + FOR sf_node IN SELECT unnest(oils_xpath('./*[contains("'||acsaf.sf_list||'",@code)]',tag_node)) LOOP + + tmp_text := oils_xpath_string('.', sf_node); + sf := oils_xpath_string('./@code', sf_node); + + IF first_sf AND tmp_text IS NOT NULL AND nfi_used IS NOT NULL THEN + + tmp_text := SUBSTRING( + tmp_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tag_node), + $$\D+$$, + '', + 'g' + ), + 
'' + )::INT, + 0 + ) + 1 + ); + + END IF; + + first_sf := FALSE; + + IF tmp_text IS NOT NULL AND tmp_text <> '' THEN + heading_text := heading_text || E'\u2021' || sf || ' ' || tmp_text; + END IF; + END LOOP; + + EXIT WHEN heading_text <> ''; + END LOOP; + + EXIT WHEN heading_text <> ''; + END LOOP; + + IF heading_text <> '' THEN + IF no_thesaurus IS TRUE THEN + heading_text := tag_used || ' ' || public.naco_normalize(heading_text); + ELSE + heading_text := tag_used || '_' || COALESCE(nfi_used,'-') || '_' || thes_code || ' ' || public.naco_normalize(heading_text); + END IF; + ELSE + heading_text := 'NOHEADING_' || thes_code || ' ' || MD5(marcxml); + END IF; + + RETURN heading_text; +END; +$func$ LANGUAGE PLPGSQL IMMUTABLE; + +--------authority.simple_heading_set--------------- +-- Function: authority.simple_heading_set(text) + +-- DROP FUNCTION authority.simple_heading_set(text); + +CREATE OR REPLACE FUNCTION authority.simple_heading_set(marcxml text) + RETURNS SETOF authority.simple_heading AS +$BODY$ +DECLARE + res authority.simple_heading%ROWTYPE; + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + cset INT; + heading_text TEXT; + joiner_text TEXT; + sort_text TEXT; + tmp_text TEXT; + tmp_xml TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + --ver1.0 + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + res.record := auth_id; + + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset LOOP + + res.atag := acsaf.id; + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + joiner_text := COALESCE(acsaf.joiner, ' '); + + FOR tmp_xml IN SELECT UNNEST(XPATH('//*[@tag="'||tag_used||'"]', marcxml::XML)) LOOP + + heading_text := COALESCE( + oils_xpath_string('./*[contains("'||acsaf.display_sf_list||'",@code)]', tmp_xml::TEXT, joiner_text), + '' + ); + IF nfi_used IS NOT NULL THEN + sort_text := SUBSTRING( + heading_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tmp_xml::TEXT), + $$\D+$$, + '', + 'g' + ), + '' + )::INT, + 0 + ) + 1 + ); + ELSE + sort_text := heading_text; + END IF; + IF heading_text IS NOT NULL AND heading_text <> '' THEN + res.value := heading_text; + res.sort_value := public.naco_normalize(sort_text); + res.index_vector = to_tsvector('keyword'::regconfig, res.sort_value); + RETURN NEXT res; + END IF; + END LOOP; + END LOOP; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql IMMUTABLE + COST 100 + ROWS 1000; +ALTER FUNCTION authority.simple_heading_set(text) + OWNER TO evergreen; + + +-----------metabib.autosuggest_prepare_tsquery--------------- +CREATE OR REPLACE FUNCTION metabib.autosuggest_prepare_tsquery(orig TEXT) RETURNS TEXT[] AS +$$ +DECLARE + orig_ended_in_space BOOLEAN; + result RECORD; + plain TEXT; + normalized TEXT; +BEGIN + --ver1.0 + orig_ended_in_space := orig ~ E'\\s$'; + + orig := ARRAY_TO_STRING( + evergreen.regexp_split_to_array(orig, E'\\W+'), ' ' + ); + + normalized := public.naco_normalize(orig); -- also trim()s + plain := trim(orig); + + IF NOT orig_ended_in_space THEN + plain := plain || ':*'; + normalized := normalized || ':*'; + END IF; + + plain := ARRAY_TO_STRING( + 
evergreen.regexp_split_to_array(plain, E'\\s+'), ' & ' + ); + normalized := ARRAY_TO_STRING( + evergreen.regexp_split_to_array(normalized, E'\\s+'), ' & ' + ); + + RETURN ARRAY[normalized, plain]; +END; +$$ LANGUAGE PLPGSQL; + + +------metabib.browse_authority_pivot-------- +CREATE OR REPLACE FUNCTION metabib.browse_authority_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + -- So far this function is not called. When its usage is known, depending on + -- how it is called/used we can use that information to modify it to use the new + -- broken apart tables for both metabib.browse_entry and metabib.browse_entry_simple_heading_map + --ver1.1 updated with kmain-806 - added note + SELECT mbe.id + FROM metabib.browse_entry mbe + -- JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + -- JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + -- JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + -- ash.atag = map.authority_field + -- AND map.metabib_field = ANY($1) + -- ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$p$ LANGUAGE SQL; + +ALTER FUNCTION metabib.browse_authority_pivot (integer[], text) STABLE; + + +-----------pg_temp.alternative_title_reingest-------------------- +-- The following function only appears in the upgrade script and not the +-- baseline schema because it's not necessary in the latter (and it's a +-- temporary function). It just serves to do a hopefully cheaper, more +-- focused reingest just to hit the alternative title index. + +-- This cribs from the guts of metabib.reingest_metabib_field_entries(), +-- and if it actually is a timesaver over a full reingest, then at some +-- point in the future it would be nice if we broke it out into a separate +-- function to make things like this easier. + +/* CREATE OR REPLACE FUNCTION pg_temp.alternative_title_reingest( bib_id BIGINT ) RETURNS VOID AS $func$ +DECLARE + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL := false; + b_skip_browse BOOL := false; + b_skip_search BOOL := false; + alt_title INT; + value_prepped TEXT; +BEGIN + --ver1.0 + SELECT INTO alt_title id FROM config.metabib_field WHERE field_class = 'title' AND name = 'alternative'; + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) WHERE field = alt_title LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + -- Avoid inserting duplicate rows, but retain granularity of being + -- able to search browse fields with "starts with" type operators + -- (for example, for titles of songs in music albums) + IF (ind_data.search_field OR ind_data.browse_field) AND NOT b_skip_search THEN + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +SELECT pg_temp.alternative_title_reingest(id) FROM biblio.record_entry WHERE NOT deleted; */ + + +------------vandelay.add_field------------------------ +CREATE OR REPLACE FUNCTION vandelay.add_field ( target_xml TEXT, source_xml TEXT, field TEXT, force_add INT ) RETURNS TEXT AS $_$ + #ver1.0 + use MARC::Record; + use MARC::File::XML (BinaryEncoding => 'UTF-8'); + use MARC::Charset; + use strict; + + MARC::Charset->assume_unicode(1); + + my $target_xml = shift; + my $source_xml = shift; + my $field_spec = shift; + my $force_add = shift || 0; + + my $target_r = MARC::Record->new_from_xml( $target_xml ); + my $source_r = MARC::Record->new_from_xml( $source_xml ); + + return $target_xml unless ($target_r && $source_r); + + my @field_list = split(',', $field_spec); + + my %fields; + for my $f (@field_list) { + $f =~ s/^\s*//; $f =~ s/\s*$//; + if ($f =~ /^(.{3})(\w*)(?:\[([^]]*)\])?$/) { + my $field = $1; + $field =~ s/\s+//; + my $sf = $2; + $sf =~ s/\s+//; + my $match = $3; + $match =~ s/^\s*//; $match =~ s/\s*$//; + $fields{$field} = { sf => [ split('', $sf) ] }; + if ($match) { + my ($msf,$mre) = split('~', $match); + if (length($msf) > 0 and length($mre) > 0) { + $msf =~ s/^\s*//; $msf =~ s/\s*$//; + $mre =~ s/^\s*//; $mre =~ s/\s*$//; + $fields{$field}{match} = { sf => $msf, re => qr/$mre/ }; + } + } + } + } + + for my $f ( keys %fields) { + if ( @{$fields{$f}{sf}} ) { + for my $from_field ($source_r->field( $f )) { + my @tos = $target_r->field( $f ); + if (!@tos) { + next if (exists($fields{$f}{match}) and !$force_add); + my @new_fields = map { $_->clone } $source_r->field( $f ); + $target_r->insert_fields_ordered( @new_fields ); + } else { + for my $to_field (@tos) { + if (exists($fields{$f}{match})) { + next unless (grep { $_ =~ $fields{$f}{match}{re} } $to_field->subfield($fields{$f}{match}{sf})); + } + for my $old_sf ($from_field->subfields) { + $to_field->add_subfields( @$old_sf ) if 
grep(/$$old_sf[0]/,@{$fields{$f}{sf}}); + } + } + } + } + } else { + my @new_fields = map { $_->clone } $source_r->field( $f ); + $target_r->insert_fields_ordered( @new_fields ); + } + } + + $target_xml = $target_r->as_xml_record; + $target_xml =~ s/^<\?.+?\?>$//mo; + $target_xml =~ s/\n//sgo; + $target_xml =~ s/>\s+ 0 THEN + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ind_data.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ind_data.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ind_data.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ind_data.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE sort_value = ind_data.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO public; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO evergreen; + + +------------ biblio.extract_metabib_field_entry ----------------- +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry(rid bigint, default_joiner text) + RETURNS SETOF metabib.field_entry_template AS +$BODY$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + --ver2.0 - KMAIN-1162 + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data is returned from oils_xpath (above) in UTF-8, not entity encoded
+                        REGEXP_REPLACE( -- This escapes embedded <s
+                            xml_node,
+                            $re$(>[^<]+)(<)([^>]+<)$re$,
+                            E'\\1&lt;\\3',
+                            'g'
+                        ),
+                        '&(?!amp;)',
+                        '&amp;',
+                        'g'
+                    ),
+                    E'\\s+',
+                    ' ',
+                    'g'
+                )
+            ), ' '), ''),
+            joiner
+        );
+
+        CONTINUE WHEN curr_text IS NULL OR curr_text = '';
+
+        IF raw_text IS NOT NULL THEN
+            raw_text := raw_text || joiner;
+        END IF;
+
+        raw_text := COALESCE(raw_text,'') || curr_text;
+
+        -- autosuggest/metabib.browse_entry
+        IF idx.browse_field THEN
+
+            IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN
+                browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] );
+            ELSE
+                browse_text := curr_text;
+            END IF;
+
+            IF idx.browse_sort_xpath IS NOT NULL AND
+                idx.browse_sort_xpath <> '' THEN
+
+                sort_value := oils_xpath_string(
+                    idx.browse_sort_xpath, xml_node, joiner,
+                    ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]]
+                );
+            ELSE
+                sort_value := browse_text;
+            END IF;
+
+            output_row.field_class = idx.field_class;
+            output_row.field = idx.id;
+            output_row.source = rid;
+            -- outer regexp_replace keeps all '.' except the last one.
+            -- inner regexp_replace removes all connecting whitespace and replaces it with a single space
+            output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g'), E'\\.$', '', 'g'));
+            output_row.sort_value :=
+                public.naco_normalize(sort_value);
+
+            output_row.authority := NULL;
+
+            IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN
+                authority_text := oils_xpath_string(
+                    idx.authority_xpath, xml_node, joiner,
+                    ARRAY[
+                        ARRAY[xfrm.prefix, xfrm.namespace_uri],
+                        ARRAY['xlink','http://www.w3.org/1999/xlink']
+                    ]
+                );
+
+                IF authority_text ~ '^\d+$' THEN
+                    authority_link := authority_text::BIGINT;
+                    PERFORM * FROM authority.record_entry WHERE id = authority_link;
+                    IF FOUND THEN
+                        output_row.authority := authority_link;
+                    END IF;
+                END IF;
+
+            END IF;
+
+            output_row.browse_field = TRUE;
+            -- Returning browse rows with search_field = true for search+browse
+            -- configs allows us to retain granularity of being able to search
+            -- browse fields with "starts with" type operators (for example, for
+            -- titles of songs in music albums)
+            IF idx.search_field THEN
+                output_row.search_field = TRUE;
+            END IF;
+            RETURN NEXT output_row;
+            output_row.browse_field = FALSE;
+            output_row.search_field = FALSE;
+            output_row.sort_value := NULL;
+        END IF;
+
+        -- insert raw node text for faceting
+        IF idx.facet_field THEN
+
+            IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN
+                facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] );
+            ELSE
+                facet_text := curr_text;
+            END IF;
+
+            output_row.field_class = idx.field_class;
+            output_row.field = -1 * idx.id;
+            output_row.source = rid;
+            output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g'), E'\\.+', '', 'g'));
+
+            output_row.facet_field = TRUE;
+            RETURN NEXT output_row;
+            output_row.facet_field = FALSE;
+        END IF;
+
+    END LOOP;
+
+    CONTINUE WHEN raw_text IS NULL OR raw_text = '';
+
+    -- insert combined node text for searching
+    IF idx.search_field THEN
+
+        IF idx.field_class = 'identifier' AND idx.name = 'bibcn' THEN
+            output_row.field_class = 'call_number';
+            output_row.browse_field = TRUE;
+            output_row.sort_value = public.naco_normalize_keep_decimal(raw_text,'');
+            output_row.value = raw_text;
+        ELSE
+            output_row.field_class = idx.field_class;
+            output_row.value =
BTRIM(REGEXP_REPLACE(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g'), E'\\.+', '', 'g')); + END IF; + + output_row.field = idx.id; + output_row.source = rid; + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION biblio.extract_metabib_field_entry(bigint, text) + OWNER TO evergreen; + + +--------------metabib.staged_browse------------------- +CREATE OR REPLACE FUNCTION metabib.staged_browse( + query text, fields integer[], + context_org integer, + context_locations integer[], + staff boolean, + browse_superpage_size integer, + count_up_from_zero boolean, + result_limit integer, + next_pivot_pos integer, + search_class text +) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; + unauthorized_entry RECORD; +BEGIN + --ver1.1 updated with kmain-806 - added support for the new metabib.browse_____entry_simple_heading_map tables. + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + CASE search_class + WHEN 'author' THEN + --Is unauthorized, i.e., 4xx on an auth record? 
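                -- Illustration with hypothetical data: an authority record with
                --   100 1_ $a Twain, Mark    and    400 1_ $a Clemens, Samuel
                -- produces a browse entry for "Clemens, Samuel" whose simple
                -- heading comes from the 4XX tag; for such an entry the counts
                -- below come from the bibs linked to the authorized record
                -- itself rather than from following authority-to-authority links.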
+ SELECT INTO unauthorized_entry * + FROM metabib.browse_author_entry_simple_heading_map mbeshm + INNER JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + INNER JOIN authority.control_set_authority_field acsaf ON ( acsaf.id = ash.atag AND acsaf.tag like '4__') + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + IF (unauthorized_entry.record IS NOT NULL) THEN + --Do unauthorized procedure, use the authorized term's auth record and it's bibs + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), + STRING_AGG(DISTINCT abl.authority::TEXT, $$,$$), + ARRAY_AGG(DISTINCT map.metabib_field) + FROM authority.bib_linking abl + INNER JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + map.authority_field = unauthorized_entry.atag + AND map.metabib_field = ANY(fields) + ) + WHERE abl.authority = unauthorized_entry.record; + ELSE + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_author_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_author_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'title' THEN + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_title_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_title_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'subject' THEN + --Is unauthorized, i.e., 4xx on an auth record? 
+ SELECT INTO unauthorized_entry * + FROM metabib.browse_subject_entry_simple_heading_map mbeshm + INNER JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + INNER JOIN authority.control_set_authority_field acsaf ON ( acsaf.id = ash.atag AND acsaf.tag like '4__') + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + IF (unauthorized_entry.record IS NOT NULL) THEN + --Do unauthorized procedure, use the authorized term's auth record and it's bibs + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), + STRING_AGG(DISTINCT abl.authority::TEXT, $$,$$), + ARRAY_AGG(DISTINCT map.metabib_field) + FROM authority.bib_linking abl + INNER JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + map.authority_field = unauthorized_entry.atag + AND map.metabib_field = ANY(fields) + ) + WHERE abl.authority = unauthorized_entry.record; + ELSE + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_subject_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_subject_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'series' THEN + --Is unauthorized, i.e., 4xx on an auth record? 
+ SELECT INTO unauthorized_entry * + FROM metabib.browse_series_entry_simple_heading_map mbeshm + INNER JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + INNER JOIN authority.control_set_authority_field acsaf ON ( acsaf.id = ash.atag AND acsaf.tag like '4__') + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + IF (unauthorized_entry.record IS NOT NULL) THEN + --Do unauthorized procedure, use the authorized term's auth record and it's bibs + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), + STRING_AGG(DISTINCT abl.authority::TEXT, $$,$$), + ARRAY_AGG(DISTINCT map.metabib_field) + FROM authority.bib_linking abl + INNER JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + map.authority_field = unauthorized_entry.atag + AND map.metabib_field = ANY(fields) + ) + WHERE abl.authority = unauthorized_entry.record; + ELSE + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_series_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_series_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'call_number' THEN + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_call_number_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + ELSE + + END CASE; + + + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. 
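                -- Worked example with made-up numbers: 250 linked bibs and a
                -- superpage size of 100 means the slices [1:100], [101:200]
                -- and [201:300] are tested in turn, stopping as soon as one
                -- slice reports a visible record; "accurate" ends up TRUE only
                -- when the superpage size covers the whole set (>= 250 here).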
+ + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. + + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. 
+ row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + OWNER TO evergreen; + + +--metabib.browse +CREATE OR REPLACE FUNCTION metabib.browse( + search_class text, + browse_term text, + context_org integer DEFAULT NULL::integer, + context_loc_group integer DEFAULT NULL::integer, + staff boolean DEFAULT false, + pivot_id bigint DEFAULT NULL::bigint, + result_limit integer DEFAULT 10 +) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + --ver1.1 updated with kmain-806 + -- Get search field int list with search_class + IF search_class = 'id|bibcn' THEN + + SELECT INTO search_class 'call_number'; + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; + + IF pivot_id IS NULL THEN + + pivot_id := metabib.browse_call_number_pivot(browse_term); + + END IF; + ELSE + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + + END CASE; + END IF; + END IF; + + CASE search_class + WHEN 'author' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = pivot_id; + WHEN 'title' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_title_entry WHERE id = pivot_id; + WHEN 'subject' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_subject_entry WHERE id = pivot_id; + WHEN 'series' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_series_entry WHERE id = pivot_id; + WHEN 'call_number' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = pivot_id; + + END CASE; + + --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. 
+ SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + public.replace_ampersand(mbe.sort_value) + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + )'; + IF search_class != 'call_number' THEN + + core_query := core_query || ' OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + )'; + + END IF; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ') AND public.replace_ampersand(mbe.truncated_sort_value) <= ' || quote_literal(public.replace_ampersand(pivot_sort_value)) || + ' ORDER BY public.replace_ampersand(mbe.truncated_sort_value) DESC, public.replace_ampersand(mbe.value) DESC '; + + -- This variant browses forward. + forward_query := core_query || + ') AND public.replace_ampersand(mbe.truncated_sort_value) > ' || quote_literal(public.replace_ampersand(pivot_sort_value)) || + ' ORDER BY public.replace_ampersand(mbe.truncated_sort_value), public.replace_ampersand(mbe.value) '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. 
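    -- Worked example: with the default result_limit of 10, back_limit = 5,
    -- back_to_pivot = 10, forward_limit = 5 and forward_to_pivot = 9, so a
    -- page is 5 entries at or before the pivot plus 5 entries after it.
    -- Illustrative calls (the terms are made up):
    --   SELECT * FROM metabib.browse('title', 'harry potter');
    --   SELECT * FROM metabib.browse('id|bibcn', '813.54');  -- call number browse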
+ RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION ALL + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + + +COMMIT; diff --git a/KCLS/sql/browse/combined/008.kmain-223-auth-match.sql b/KCLS/sql/browse/combined/008.kmain-223-auth-match.sql new file mode 100644 index 0000000000..2b60a4c1bf --- /dev/null +++ b/KCLS/sql/browse/combined/008.kmain-223-auth-match.sql @@ -0,0 +1,494 @@ +BEGIN; + +ALTER TABLE vandelay.authority_match +ADD match_score integer NOT NULL DEFAULT 0; + +DROP TYPE IF EXISTS matched_records CASCADE; +CREATE TYPE matched_records AS ( + create_date timestamp with time zone, + creator integer, + edit_date timestamp with time zone, + id bigint, + quality integer, + source integer, + tcn_source text, + tcn_value text, + _id text, + match_score integer, + match_quality integer +); + +CREATE OR REPLACE FUNCTION vandelay.get_matched_records(queued_bib bigint, bib_type text) + RETURNS SETOF matched_records AS +$BODY$ +DECLARE + m_record matched_records; +BEGIN + --ver1.0 + IF bib_type = 'bib' THEN + FOR m_record IN + SELECT create_date, creator, edit_date, bre.id, vqbr.quality, source, tcn_source, tcn_value, + 'null' as _id, vbm.match_score, vbm.quality as match_quality + FROM biblio.record_entry bre + INNER JOIN vandelay.bib_match vbm + ON vbm.eg_record = bre.id + INNER JOIN vandelay.queued_bib_record vqbr + ON vqbr.id = vbm.queued_record + WHERE vqbr.id = queued_bib + ORDER BY bre.id DESC + LOOP + RETURN NEXT m_record; + END LOOP; + ELSE + FOR m_record IN + SELECT create_date, creator, edit_date, are.id, vqar.quality, source, '' as tcn_source, '' as tcn_value, + 'null' as _id, vam.match_score, vam.quality as match_quality + FROM authority.record_entry are + INNER JOIN vandelay.authority_match vam + ON vam.eg_record = are.id + INNER JOIN vandelay.queued_authority_record vqar + ON vqar.id = vam.queued_record + WHERE vqar.id = queued_bib + ORDER BY are.id DESC + LOOP + RETURN NEXT m_record; + END LOOP; + END IF; + RETURN; +END; +$BODY$ + LANGUAGE plpgsql IMMUTABLE + COST 100 + ROWS 1000; +ALTER FUNCTION vandelay.get_matched_records(bigint, text) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay.auto_overlay_authority_record_with_best(import_id bigint, merge_profile_id integer, lwm_ratio_value_p numeric) + RETURNS boolean AS +$BODY$ +DECLARE + eg_id BIGINT; + lwm_ratio_value NUMERIC; +BEGIN + --ver1.0 + lwm_ratio_value := COALESCE(lwm_ratio_value_p, 0.0); + + PERFORM * FROM vandelay.queued_authority_record WHERE import_time IS NOT NULL AND id = import_id; + + IF FOUND THEN + RAISE NOTICE 'already imported, cannot auto-overlay'; + RETURN FALSE; + END IF; + + SELECT m.eg_record INTO eg_id + FROM vandelay.authority_match m + JOIN vandelay.queued_authority_record qr ON (m.queued_record = qr.id) + JOIN vandelay.authority_queue q ON (qr.queue = q.id) + JOIN authority.record_entry r ON (r.id = m.eg_record) + WHERE m.queued_record = import_id + AND qr.quality::NUMERIC / COALESCE(NULLIF(m.quality,0),1)::NUMERIC >= lwm_ratio_value + ORDER BY m.match_score DESC, -- required match score + 
qr.quality::NUMERIC / COALESCE(NULLIF(m.quality,0),1)::NUMERIC DESC, -- quality tie breaker + m.id -- when in doubt, use the first match + LIMIT 1; + + IF eg_id IS NULL THEN + RAISE NOTICE 'incoming record is not of high enough quality'; + RETURN FALSE; + END IF; + + RETURN vandelay.overlay_authority_record( import_id, eg_id, merge_profile_id ); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.auto_overlay_authority_record_with_best(bigint, integer, numeric) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION vandelay.match_auth_record() + RETURNS trigger AS +$BODY$ +DECLARE + incoming_existing_id TEXT; + test_result vandelay.match_set_test_result%ROWTYPE; + tmp_rec BIGINT; + match_set INT; +BEGIN + --ver1.0 + IF TG_OP IN ('INSERT','UPDATE') AND NEW.imported_as IS NOT NULL THEN + RETURN NEW; + END IF; + + DELETE FROM vandelay.authority_match WHERE queued_record = NEW.id; + + SELECT q.match_set INTO match_set FROM vandelay.authority_queue q WHERE q.id = NEW.queue; + + IF match_set IS NOT NULL THEN + NEW.quality := vandelay.measure_record_quality( NEW.marc, match_set ); + END IF; + + -- Perfect matches on 901$c exit early with a match with high quality. + incoming_existing_id := + oils_xpath_string('//*[@tag="901"]/*[@code="c"][1]', NEW.marc); + + IF incoming_existing_id IS NOT NULL AND incoming_existing_id != '' THEN + SELECT id INTO tmp_rec FROM authority.record_entry WHERE id = incoming_existing_id::bigint; + IF tmp_rec IS NOT NULL THEN + INSERT INTO vandelay.authority_match (queued_record, eg_record, match_score, quality) + SELECT + NEW.id, + b.id, + 9999, + vandelay.measure_record_quality( b.marc, match_set ) + FROM authority.record_entry b + WHERE id = incoming_existing_id::bigint; + END IF; + END IF; + + IF match_set IS NULL THEN + RETURN NEW; + END IF; + + FOR test_result IN SELECT * FROM + vandelay.match_set_test_marcxml_auth(match_set, NEW.marc) LOOP + + INSERT INTO vandelay.authority_match ( queued_record, eg_record, match_score, quality ) + SELECT + NEW.id, + test_result.record, + test_result.quality, + vandelay.measure_record_quality( b.marc, match_set ) + FROM authority.record_entry b + WHERE id = test_result.record; + + END LOOP; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.match_auth_record() + OWNER TO evergreen; + +CREATE TRIGGER zz_match_auth_trigger + BEFORE INSERT OR UPDATE + ON vandelay.queued_authority_record + FOR EACH ROW + EXECUTE PROCEDURE vandelay.match_auth_record(); + + +CREATE OR REPLACE FUNCTION vandelay.match_set_test_marcxml_auth(match_set_id integer, record_xml text) + RETURNS SETOF vandelay.match_set_test_result AS +$BODY$ +DECLARE + tags_rstore HSTORE; + svf_rstore HSTORE; + coal TEXT; + joins TEXT; + query_ TEXT; + wq TEXT; + qvalue INTEGER; + rec RECORD; +BEGIN + --ver1.0 + tags_rstore := vandelay.flatten_marc_hstore(record_xml); + svf_rstore := vandelay.extract_rec_attrs(record_xml); + + CREATE TEMPORARY TABLE _vandelay_tmp_qrows_auth (q INTEGER); + CREATE TEMPORARY TABLE _vandelay_tmp_jrows_auth (j TEXT); + + -- generate the where clause and return that directly (into wq), and as + -- a side-effect, populate the _vandelay_tmp_[qj]rows tables. 
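+ -- Sketch of the query assembled below, assuming a single match point on
+ -- authority tag 100, subfield a, quality 1, with node id 1 (hypothetical):
+ --
+ --   SELECT DISTINCT(record), COALESCE(n1.quality, 0) AS quality
+ --   FROM (SELECT *, 1 AS quality
+ --           FROM authority.full_rec mfr
+ --          WHERE mfr.tag = '100' AND mfr.subfield = 'a'
+ --            AND (<comparisons from vandelay._node_tag_comparisons>)) n1
+ --   JOIN authority.record_entry bre ON (bre.id = record)
+ --   WHERE <wq, built by vandelay.get_expr_from_match_set_auth> AND not bre.deleted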
+ wq := vandelay.get_expr_from_match_set_auth(match_set_id, tags_rstore); + + query_ := 'SELECT DISTINCT(record), '; + + -- qrows table is for the quality bits we add to the SELECT clause + SELECT ARRAY_TO_STRING( + ARRAY_ACCUM('COALESCE(n' || q::TEXT || '.quality, 0)'), ' + ' + ) INTO coal FROM _vandelay_tmp_qrows_auth; + + -- our query string so far is the SELECT clause and the inital FROM. + -- no JOINs yet nor the WHERE clause + query_ := query_ || coal || ' AS quality ' || E'\n'; + + -- jrows table is for the joins we must make (and the real text conditions) + SELECT ARRAY_TO_STRING(ARRAY_ACCUM(j), E'\n') INTO joins + FROM _vandelay_tmp_jrows_auth; + + -- add those joins and the where clause to our query. + query_ := query_ || joins || E'\n' || 'JOIN authority.record_entry bre ON (bre.id = record) ' || 'WHERE ' || wq || ' AND not bre.deleted'; + + -- this will return rows of record,quality + FOR rec IN EXECUTE query_ USING tags_rstore, svf_rstore LOOP + RETURN NEXT rec; + END LOOP; + + DROP TABLE _vandelay_tmp_qrows_auth; + DROP TABLE _vandelay_tmp_jrows_auth; + RETURN; +END; + +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION vandelay.match_set_test_marcxml_auth(integer, text) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay.get_expr_from_match_set_point_auth(node vandelay.match_set_point, tags_rstore hstore) + RETURNS text AS +$BODY$ +DECLARE + q TEXT; + i INTEGER; + this_op TEXT; + children INTEGER[]; + child vandelay.match_set_point; +BEGIN + --ver1.0 + SELECT ARRAY_ACCUM(id) INTO children FROM vandelay.match_set_point + WHERE parent = node.id; + + IF ARRAY_LENGTH(children, 1) > 0 THEN + this_op := vandelay._get_expr_render_one(node); + q := '('; + i := 1; + WHILE children[i] IS NOT NULL LOOP + SELECT * INTO child FROM vandelay.match_set_point + WHERE id = children[i]; + IF i > 1 THEN + q := q || ' ' || this_op || ' '; + END IF; + i := i + 1; + q := q || vandelay.get_expr_from_match_set_point_auth(child, tags_rstore); + END LOOP; + q := q || ')'; + RETURN q; + ELSIF node.bool_op IS NULL THEN + PERFORM vandelay._get_expr_push_qrow_auth(node); + PERFORM vandelay._get_expr_push_jrow_auth(node, tags_rstore); + RETURN vandelay._get_expr_render_one(node); + ELSE + RETURN ''; + END IF; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.get_expr_from_match_set_point_auth(vandelay.match_set_point, hstore) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay.get_expr_from_match_set_auth(match_set_id integer, tags_rstore hstore) + RETURNS text AS +$BODY$ +DECLARE + root vandelay.match_set_point; +BEGIN + --ver1.0 + SELECT * INTO root FROM vandelay.match_set_point + WHERE parent IS NULL AND match_set = match_set_id; + + RETURN vandelay.get_expr_from_match_set_point_auth(root, tags_rstore); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.get_expr_from_match_set_auth(integer, hstore) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION vandelay._get_expr_push_qrow_auth(node vandelay.match_set_point) + RETURNS void AS +$BODY$ +DECLARE +BEGIN + --ver1.0 + INSERT INTO _vandelay_tmp_qrows_auth (q) VALUES (node.id); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay._get_expr_push_qrow_auth(vandelay.match_set_point) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay._get_expr_push_jrow_auth(node vandelay.match_set_point, tags_rstore hstore) + RETURNS void AS +$BODY$ +DECLARE + jrow TEXT; + my_alias TEXT; + op TEXT; + tagkey TEXT; + caseless BOOL; + 
jrow_count INT; + my_using TEXT; + my_join TEXT; +BEGIN + --ver1.0 + -- remember $1 is tags_rstore, and $2 is svf_rstore + + caseless := FALSE; + SELECT COUNT(*) INTO jrow_count FROM _vandelay_tmp_jrows_auth; + IF jrow_count > 0 THEN + my_using := ' USING (record)'; + my_join := 'FULL OUTER JOIN'; + ELSE + my_using := ''; + my_join := 'FROM'; + END IF; + + IF node.tag IS NOT NULL THEN + caseless := (node.tag IN ('020', '022', '024')); + tagkey := node.tag; + IF node.subfield IS NOT NULL THEN + tagkey := tagkey || node.subfield; + END IF; + END IF; + + IF node.negate THEN + IF caseless THEN + op := 'NOT LIKE'; + ELSE + op := '<>'; + END IF; + ELSE + IF caseless THEN + op := 'LIKE'; + ELSE + op := '='; + END IF; + END IF; + + my_alias := 'n' || node.id::TEXT; + + jrow := my_join || ' (SELECT *, '; + IF node.tag IS NOT NULL THEN + jrow := jrow || node.quality || + ' AS quality FROM authority.full_rec mfr WHERE mfr.tag = ''' || + node.tag || ''''; + IF node.subfield IS NOT NULL THEN + jrow := jrow || ' AND mfr.subfield = ''' || + node.subfield || ''''; + END IF; + jrow := jrow || ' AND ('; + jrow := jrow || vandelay._node_tag_comparisons(caseless, op, tags_rstore, tagkey); + jrow := jrow || ')) ' || my_alias || my_using || E'\n'; + ELSE -- svf + jrow := jrow || 'id AS record, ' || node.quality || + ' AS quality FROM metabib.record_attr mra WHERE mra.attrs->''' || + node.svf || ''' ' || op || ' $2->''' || node.svf || ''') ' || + my_alias || my_using || E'\n'; + END IF; + INSERT INTO _vandelay_tmp_jrows_auth (j) VALUES (jrow); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay._get_expr_push_jrow_auth(vandelay.match_set_point, hstore) + OWNER TO evergreen; + +-- Function: vandelay.extract_rec_attrs(text, text[]) + +-- DROP FUNCTION vandelay.extract_rec_attrs(text, text[]); + +CREATE OR REPLACE FUNCTION vandelay.extract_rec_attrs(xml text, attr_defs text[]) + RETURNS hstore AS +$BODY$ +DECLARE + transformed_xml TEXT; + prev_xfrm TEXT; + normalizer RECORD; + xfrm config.xml_transform%ROWTYPE; + attr_value TEXT; + new_attrs HSTORE := ''::HSTORE; + attr_def config.record_attr_definition%ROWTYPE; +BEGIN + --ver1.0 + FOR attr_def IN SELECT * FROM config.record_attr_definition WHERE name IN (SELECT * FROM UNNEST(attr_defs)) ORDER BY format LOOP + + IF attr_def.tag IS NOT NULL THEN -- tag (and optional subfield list) selection + SELECT ARRAY_TO_STRING(ARRAY_ACCUM(x.value), COALESCE(attr_def.joiner,' ')) INTO attr_value + FROM vandelay.flatten_marc(xml) AS x + WHERE x.tag LIKE attr_def.tag + AND CASE + WHEN attr_def.sf_list IS NOT NULL + THEN POSITION(x.subfield IN attr_def.sf_list) > 0 + ELSE TRUE + END + GROUP BY x.tag + ORDER BY x.tag + LIMIT 1; + + ELSIF attr_def.fixed_field IS NOT NULL THEN -- a named fixed field, see config.marc21_ff_pos_map.fixed_field + attr_value := vandelay.marc21_extract_fixed_field(xml, attr_def.fixed_field); + + ELSIF attr_def.xpath IS NOT NULL THEN -- and xpath expression + + SELECT INTO xfrm * FROM config.xml_transform WHERE name = attr_def.format; + + -- See if we can skip the XSLT ... 
it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(xml,xfrm.xslt); + ELSE + transformed_xml := xml; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + IF xfrm.name IS NULL THEN + -- just grab the marcxml (empty) transform + SELECT INTO xfrm * FROM config.xml_transform WHERE xslt = '---' LIMIT 1; + prev_xfrm := xfrm.name; + END IF; + + attr_value := oils_xpath_string(attr_def.xpath, transformed_xml, COALESCE(attr_def.joiner,' '), ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]]); + + ELSIF attr_def.phys_char_sf IS NOT NULL THEN -- a named Physical Characteristic, see config.marc21_physical_characteristic_*_map + SELECT m.value::TEXT INTO attr_value + FROM vandelay.marc21_physical_characteristics(xml) v + JOIN config.marc21_physical_characteristic_value_map m ON (m.id = v.value) + WHERE v.subfield = attr_def.phys_char_sf + LIMIT 1; -- Just in case ... + + END IF; + + -- apply index normalizers to attr_value + FOR normalizer IN + SELECT n.func AS func, + n.param_count AS param_count, + m.params AS params + FROM config.index_normalizer n + JOIN config.record_attr_index_norm_map m ON (m.norm = n.id) + WHERE attr = attr_def.name + ORDER BY m.pos LOOP + EXECUTE 'SELECT ' || normalizer.func || '(' || + quote_nullable( attr_value ) || + CASE + WHEN normalizer.param_count > 0 + THEN ',' || REPLACE(REPLACE(BTRIM(normalizer.params,'[]'),E'\'',E'\\\''),E'"',E'\'') + ELSE '' + END || + ')' INTO attr_value; + + END LOOP; + + -- Add the new value to the hstore + new_attrs := new_attrs || hstore( attr_def.name, attr_value ); + + END LOOP; + + RETURN new_attrs; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.extract_rec_attrs(text, text[]) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/009.kmain-806-populate_separate_browse_entry_simple_heading_map.sql b/KCLS/sql/browse/combined/009.kmain-806-populate_separate_browse_entry_simple_heading_map.sql new file mode 100644 index 0000000000..43ab53fcad --- /dev/null +++ b/KCLS/sql/browse/combined/009.kmain-806-populate_separate_browse_entry_simple_heading_map.sql @@ -0,0 +1,216 @@ +-- Populate split metabib.browse_entry_simple_heading_map tables for author, series, subject, and title +-------------------------------------------------------------------------------------------------------- + +BEGIN; + +-- Add constraints + +ALTER TABLE metabib.browse_author_entry_simple_heading_map + ADD PRIMARY KEY (id); + +ALTER TABLE metabib.browse_author_entry_simple_heading_map + ADD CONSTRAINT browse_author_entry_simple_heading_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_author_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION; + +ALTER TABLE metabib.browse_author_entry_simple_heading_map + ADD CONSTRAINT browse_author_entry_simple_heading_map_simple_heading_fkey FOREIGN KEY (simple_heading) + REFERENCES authority.simple_heading (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE CASCADE; + +-- Populate metabib.browse_author_entry_simple_heading_map + +INSERT INTO metabib.browse_author_entry_simple_heading_map + (entry, simple_heading) + ( + SELECT mbae.id, ash.id + FROM metabib.browse_author_entry mbae + JOIN authority.simple_heading ash + ON mbae.sort_value ILIKE ash.sort_value + ); + +-- Index: metabib.browse_author_entry_sh_map_entry_idx + +-- DROP INDEX metabib.browse_author_entry_sh_map_entry_idx; + +CREATE INDEX browse_author_entry_sh_map_entry_idx + ON 
metabib.browse_author_entry_simple_heading_map + USING btree (entry); + +-- Index: metabib.browse_author_entry_sh_map_sh_idx + +-- DROP INDEX metabib.browse_author_entry_sh_map_sh_idx; + +CREATE INDEX browse_author_entry_sh_map_sh_idx + ON metabib.browse_author_entry_simple_heading_map + USING btree (simple_heading); + +COMMIT; + + +--------------------------------------------------------------------------------------------------------- + +BEGIN; + +-- Add constraints + +ALTER TABLE metabib.browse_series_entry_simple_heading_map + ADD PRIMARY KEY (id); + +ALTER TABLE metabib.browse_series_entry_simple_heading_map + ADD CONSTRAINT browse_series_entry_simple_heading_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_series_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION; + +ALTER TABLE metabib.browse_series_entry_simple_heading_map + ADD CONSTRAINT browse_series_entry_simple_heading_map_simple_heading_fkey FOREIGN KEY (simple_heading) + REFERENCES authority.simple_heading (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE CASCADE; + +-- Populate metabib.browse_series_entry_simple_heading_map + +INSERT INTO metabib.browse_series_entry_simple_heading_map + (entry, simple_heading) + ( + SELECT mbae.id, ash.id + FROM metabib.browse_series_entry mbae + JOIN authority.simple_heading ash + ON mbae.sort_value ILIKE ash.sort_value + ); + +-- Index: metabib.browse_series_entry_sh_map_entry_idx + +-- DROP INDEX metabib.browse_series_entry_sh_map_entry_idx; + +CREATE INDEX browse_series_entry_sh_map_entry_idx + ON metabib.browse_series_entry_simple_heading_map + USING btree (entry); + +-- Index: metabib.browse_series_entry_sh_map_sh_idx + +-- DROP INDEX metabib.browse_series_entry_sh_map_sh_idx; + +CREATE INDEX browse_series_entry_sh_map_sh_idx + ON metabib.browse_series_entry_simple_heading_map + USING btree (simple_heading); + +COMMIT; + + +--------------------------------------------------------------------------------------------------------- + +BEGIN; + +-- Add constraints + +ALTER TABLE metabib.browse_subject_entry_simple_heading_map + ADD PRIMARY KEY (id); + +ALTER TABLE metabib.browse_subject_entry_simple_heading_map + ADD CONSTRAINT browse_subject_entry_simple_heading_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_subject_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION; + +ALTER TABLE metabib.browse_subject_entry_simple_heading_map + ADD CONSTRAINT browse_subject_entry_simple_heading_map_simple_heading_fkey FOREIGN KEY (simple_heading) + REFERENCES authority.simple_heading (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE CASCADE; + + +-- Populate metabib.browse_subject_entry_simple_heading_map + +INSERT INTO metabib.browse_subject_entry_simple_heading_map + (entry, simple_heading) + ( + SELECT mbae.id, ash.id + FROM metabib.browse_subject_entry mbae + JOIN authority.simple_heading ash + ON mbae.sort_value ILIKE ash.sort_value + ); + +-- Index: metabib.browse_subject_entry_sh_map_entry_idx + +-- DROP INDEX metabib.browse_subject_entry_sh_map_entry_idx; + +CREATE INDEX browse_subject_entry_sh_map_entry_idx + ON metabib.browse_subject_entry_simple_heading_map + USING btree (entry); + +-- Index: metabib.browse_subject_entry_sh_map_sh_idx + +-- DROP INDEX metabib.browse_subject_entry_sh_map_sh_idx; + +CREATE INDEX browse_subject_entry_sh_map_sh_idx + ON metabib.browse_subject_entry_simple_heading_map + USING btree (simple_heading); + +COMMIT; + + 
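+-- Illustrative sanity check: after the population above, entries whose
+-- sort_value matched no authority.simple_heading remain unmapped and can be
+-- listed with a query of this shape, e.g. for subjects:
+--
+--   SELECT mbse.id, mbse.sort_value
+--     FROM metabib.browse_subject_entry mbse
+--     LEFT JOIN metabib.browse_subject_entry_simple_heading_map shm
+--       ON shm.entry = mbse.id
+--    WHERE shm.id IS NULL;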
+--------------------------------------------------------------------------------------------------------- + +BEGIN; + +-- Add constraints + +ALTER TABLE metabib.browse_title_entry_simple_heading_map + ADD PRIMARY KEY (id); + +ALTER TABLE metabib.browse_title_entry_simple_heading_map + ADD CONSTRAINT browse_title_entry_simple_heading_map_entry_fkey FOREIGN KEY (entry) + REFERENCES metabib.browse_title_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION; + +ALTER TABLE metabib.browse_title_entry_simple_heading_map + ADD CONSTRAINT browse_title_entry_simple_heading_map_simple_heading_fkey FOREIGN KEY (simple_heading) + REFERENCES authority.simple_heading (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE CASCADE; + +-- Populate metabib.browse_title_entry_simple_heading_map + +INSERT INTO metabib.browse_title_entry_simple_heading_map + (entry, simple_heading) + ( + SELECT mbae.id, ash.id + FROM metabib.browse_title_entry mbae + JOIN authority.simple_heading ash + ON mbae.sort_value ILIKE ash.sort_value + ); + +-- Index: metabib.browse_title_entry_sh_map_entry_idx + +-- DROP INDEX metabib.browse_title_entry_sh_map_entry_idx; + +CREATE INDEX browse_title_entry_sh_map_entry_idx + ON metabib.browse_title_entry_simple_heading_map + USING btree (entry); + +-- Index: metabib.browse_title_entry_sh_map_sh_idx + +-- DROP INDEX metabib.browse_title_entry_sh_map_sh_idx; + +CREATE INDEX browse_title_entry_sh_map_sh_idx + ON metabib.browse_title_entry_simple_heading_map + USING btree (simple_heading); + +COMMIT; + + +------------------------------------------------------------------------------------------------------- +-- Now that the new tables have been created, the following functions have been modified in the +-- combined browse scripts. +-- 003 -- +-- metabib.browse_author_authority_refs_pivot +-- metabib.browse_series_authority_refs_pivot +-- metabib.browse_subject_authority_refs_pivot +-- metabib.browse_title_authority_refs_pivot + +-- 007 -- +-- metabib.staged_browse +-- metabib.browse +-- authority.indexing_ingest_or_delete -- updates data when authority records changed +-- -- ongoing updates to the 4 new tables + +-- 001 -- +-- Removed the creation of the being split up. +------------------------------------------------------------------------------------------------------- \ No newline at end of file diff --git a/KCLS/sql/browse/combined/010.kmain-935-modify_biblio_indexing_ingest_or_delete.sql b/KCLS/sql/browse/combined/010.kmain-935-modify_biblio_indexing_ingest_or_delete.sql new file mode 100644 index 0000000000..71fbda65c1 --- /dev/null +++ b/KCLS/sql/browse/combined/010.kmain-935-modify_biblio_indexing_ingest_or_delete.sql @@ -0,0 +1,187 @@ +BEGIN; + +-- Function: biblio.indexing_ingest_or_delete() + +-- DROP FUNCTION biblio.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION biblio.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + transformed_xml TEXT; + prev_xfrm TEXT; + normalizer RECORD; + xfrm config.xml_transform%ROWTYPE; + attr_value TEXT; + new_attrs HSTORE := ''::HSTORE; + attr_def config.record_attr_definition%ROWTYPE; +BEGIN + + IF NEW.deleted IS TRUE THEN -- If this bib is deleted + PERFORM * FROM config.internal_flag WHERE + name = 'ingest.metarecord_mapping.preserve_on_delete' AND enabled; + IF NOT FOUND THEN + -- One needs to keep these around to support searches + -- with the #deleted modifier, so one should turn on the named + -- internal flag for that functionality. 
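+ -- For example, that flag can be enabled ahead of time with a statement of
+ -- this form:
+ --   UPDATE config.internal_flag SET enabled = TRUE
+ --    WHERE name = 'ingest.metarecord_mapping.preserve_on_delete';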
+ DELETE FROM metabib.metarecord_source_map WHERE source = NEW.id; + DELETE FROM metabib.record_attr WHERE id = NEW.id; + END IF; + + DELETE FROM authority.bib_linking WHERE bib = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM biblio.peer_bib_copy_map WHERE peer_record = NEW.id; -- Separate any multi-homed items + DELETE FROM metabib.browse_author_entry_def_map WHERE source = NEW.id; -- Don't auto-suggest deleted bibs + DELETE FROM metabib.browse_series_entry_def_map WHERE source = NEW.id; -- Don't auto-suggest deleted bibs + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = NEW.id; -- Don't auto-suggest deleted bibs + DELETE FROM metabib.browse_title_entry_def_map WHERE source = NEW.id; -- Don't auto-suggest deleted bibs + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = NEW.id; -- Don't auto-suggest deleted bibs + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + END IF; + + -- Record authority linking + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_linking' AND enabled; + IF NOT FOUND THEN + PERFORM biblio.map_authority_linking( NEW.id, NEW.marc ); + END IF; + + -- Flatten and insert the mfr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_metabib_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM metabib.reingest_metabib_full_rec(NEW.id); + + -- Now we pull out attribute data, which is dependent on the mfr for all but XPath-based fields + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_metabib_rec_descriptor' AND enabled; + IF NOT FOUND THEN + FOR attr_def IN SELECT * FROM config.record_attr_definition ORDER BY format LOOP + + IF attr_def.tag IS NOT NULL THEN -- tag (and optional subfield list) selection + SELECT ARRAY_TO_STRING(ARRAY_ACCUM(value), COALESCE(attr_def.joiner,' ')) INTO attr_value + FROM (SELECT * FROM metabib.full_rec ORDER BY tag, subfield) AS x + WHERE record = NEW.id + AND tag LIKE attr_def.tag + AND CASE + WHEN attr_def.sf_list IS NOT NULL + THEN POSITION(subfield IN attr_def.sf_list) > 0 + ELSE TRUE + END + GROUP BY tag + ORDER BY tag + LIMIT 1; + + ELSIF attr_def.fixed_field IS NOT NULL THEN -- a named fixed field, see config.marc21_ff_pos_map.fixed_field + attr_value := biblio.marc21_extract_fixed_field(NEW.id, attr_def.fixed_field); + + ELSIF attr_def.xpath IS NOT NULL THEN -- and xpath expression + + SELECT INTO xfrm * FROM config.xml_transform WHERE name = attr_def.format; + + -- See if we can skip the XSLT ... 
it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(NEW.marc,xfrm.xslt); + ELSE + transformed_xml := NEW.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + IF xfrm.name IS NULL THEN + -- just grab the marcxml (empty) transform + SELECT INTO xfrm * FROM config.xml_transform WHERE xslt = '---' LIMIT 1; + prev_xfrm := xfrm.name; + END IF; + + attr_value := oils_xpath_string(attr_def.xpath, transformed_xml, COALESCE(attr_def.joiner,' '), ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]]); + + ELSIF attr_def.phys_char_sf IS NOT NULL THEN -- a named Physical Characteristic, see config.marc21_physical_characteristic_*_map + SELECT m.value INTO attr_value + FROM biblio.marc21_physical_characteristics(NEW.id) v + JOIN config.marc21_physical_characteristic_value_map m ON (m.id = v.value) + WHERE v.subfield = attr_def.phys_char_sf + LIMIT 1; -- Just in case ... + + END IF; + + -- apply index normalizers to attr_value + FOR normalizer IN + SELECT n.func AS func, + n.param_count AS param_count, + m.params AS params + FROM config.index_normalizer n + JOIN config.record_attr_index_norm_map m ON (m.norm = n.id) + WHERE attr = attr_def.name + ORDER BY m.pos LOOP + EXECUTE 'SELECT ' || normalizer.func || '(' || + COALESCE( quote_literal( attr_value ), 'NULL' ) || + CASE + WHEN normalizer.param_count > 0 + THEN ',' || REPLACE(REPLACE(BTRIM(normalizer.params,'[]'),E'\'',E'\\\''),E'"',E'\'') + ELSE '' + END || + ')' INTO attr_value; + + END LOOP; + + -- Add the new value to the hstore + new_attrs := new_attrs || hstore( attr_def.name, attr_value ); + + END LOOP; + + IF TG_OP = 'INSERT' OR OLD.deleted THEN -- initial insert OR revivication + DELETE FROM metabib.record_attr WHERE id = NEW.id; + INSERT INTO metabib.record_attr (id, attrs) VALUES (NEW.id, new_attrs); + ELSE + UPDATE metabib.record_attr SET attrs = new_attrs WHERE id = NEW.id; + END IF; + + END IF; + END IF; + + -- Gather and insert the field entry data + PERFORM metabib.reingest_metabib_field_entries(NEW.id); + + -- Located URI magic + IF TG_OP = 'INSERT' THEN + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_located_uri' AND enabled; + IF NOT FOUND THEN + PERFORM biblio.extract_located_uris( NEW.id, NEW.marc, NEW.editor ); + END IF; + ELSE + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_located_uri' AND enabled; + IF NOT FOUND THEN + PERFORM biblio.extract_located_uris( NEW.id, NEW.marc, NEW.editor ); + END IF; + END IF; + + -- (re)map metarecord-bib linking + IF TG_OP = 'INSERT' THEN -- if not deleted and performing an insert, check for the flag + PERFORM * FROM config.internal_flag WHERE name = 'ingest.metarecord_mapping.skip_on_insert' AND enabled; + IF NOT FOUND THEN + PERFORM metabib.remap_metarecord_for_bib( NEW.id, NEW.fingerprint ); + END IF; + ELSE -- we're doing an update, and we're not deleted, remap + PERFORM * FROM config.internal_flag WHERE name = 'ingest.metarecord_mapping.skip_on_update' AND enabled; + IF NOT FOUND THEN + PERFORM metabib.remap_metarecord_for_bib( NEW.id, NEW.fingerprint ); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION biblio.indexing_ingest_or_delete() + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/011.kmain-1011-metabib.triggered_reingest_for_bib_set.sql b/KCLS/sql/browse/combined/011.kmain-1011-metabib.triggered_reingest_for_bib_set.sql new file mode 100644 index 
0000000000..59ca5fffa6 --- /dev/null +++ b/KCLS/sql/browse/combined/011.kmain-1011-metabib.triggered_reingest_for_bib_set.sql @@ -0,0 +1,31 @@ +BEGIN; + +CREATE OR REPLACE FUNCTION metabib.triggered_reingest_for_bib_set(start_id bigint, stop_id bigint) + RETURNS BIGINT AS +$BODY$ +DECLARE + record_id BIGINT; + reingest_count BIGINT; +BEGIN + + UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.reingest.force_on_same_marc'; + + reingest_count = 0; + FOR record_id IN (SELECT id FROM biblio.record_entry WHERE id >= start_id AND id <= stop_id AND NOT deleted ORDER BY id) LOOP + + UPDATE biblio.record_entry SET id = id WHERE id = record_id; + reingest_count = reingest_count + 1; + + END LOOP; + + UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.reingest.force_on_same_marc'; + + RETURN reingest_count; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.triggered_reingest_for_bib_set(bigint, bigint) + OWNER TO evergreen; + +COMMIT; \ No newline at end of file diff --git a/KCLS/sql/browse/combined/012.kmain-1039-authority-unlinked-bibs-to-auth.sql b/KCLS/sql/browse/combined/012.kmain-1039-authority-unlinked-bibs-to-auth.sql new file mode 100644 index 0000000000..6b392cdfb8 --- /dev/null +++ b/KCLS/sql/browse/combined/012.kmain-1039-authority-unlinked-bibs-to-auth.sql @@ -0,0 +1,120 @@ + +BEGIN; + +-- Function: authority.unlinked_bibs_to_given_auth_id(bigint) + +-- DROP FUNCTION authority.unlinked_bibs_to_given_auth_id(bigint); + +CREATE OR REPLACE FUNCTION authority.unlinked_bibs_to_given_auth_id(auth_id bigint) + RETURNS bigint[] AS +$BODY$ +DECLARE + wheres TEXT[]; + single_where TEXT; + query TEXT; + results_cursor REFCURSOR; + bibs BIGINT[]; + first INT := 1; + +BEGIN + -- ver 2.0 - KMAIN-1090: This version compares the authority.full_rec to metabib.real_full_rec. + + SELECT INTO wheres COALESCE(ARRAY_AGG('subfield = ''' || subfield || ''' AND value = ''' || value || ''''), ARRAY[]::TEXT[]) + FROM authority.full_rec WHERE record = auth_id AND tag ILIKE '1__'; + + query := 'SELECT COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) FROM ('; + + FOREACH single_where IN ARRAY wheres + LOOP + IF first < 1 THEN + -- not first subquery + query := query || ' INTERSECT (SELECT record FROM metabib.real_full_rec WHERE ' || single_where || ')'; + ELSE + -- first subquery + query := query || '(SELECT record FROM metabib.real_full_rec WHERE ' || single_where || ')'; + first := 0; + END IF; + END LOOP; + + query := query || ') AS X'; + + OPEN results_cursor FOR EXECUTE query; + + FETCH results_cursor INTO bibs; + + RETURN bibs; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.unlinked_bibs_to_given_auth_id(bigint) + OWNER TO evergreen; + + +-- Function: authority.unlinked_bibs_to_given_auth_id(bigint) + +-- DROP FUNCTION authority.unlinked_bibs_to_given_auth_id(bigint); + +CREATE OR REPLACE FUNCTION authority.unlinked_bibs_to_given_auth_text(search_class text, hundredXX_value text) + RETURNS bigint[] AS +$BODY$ +DECLARE + bibs BIGINT[]; +BEGIN + -- ver 0.1 + + -- Add % to end to take into consideration additonal characters like '.' and date ($d). + -- This may get us more bibs then wanted but will a little more is better than missing some. 
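+ -- For example (hypothetical heading): 'Twain, Mark' becomes 'Twain, Mark%',
+ -- so bib values such as 'Twain, Mark, 1835-1910.' still satisfy the ILIKE
+ -- comparisons below.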
+ hundredXX_value := hundredXX_value || '%'; + + CASE search_class + WHEN 'author' THEN + SELECT INTO bibs COALESCE(ARRAY_AGG(mbaedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_author_entry AS mbae + LEFT OUTER JOIN metabib.browse_author_entry_simple_heading_map AS mbaeshm + ON mbae.id = mbaeshm.entry + JOIN metabib.browse_author_entry_def_map AS mbaedm + ON mbae.id = mbaedm.entry + WHERE mbaeshm.id IS NULL AND mbae.value ILIKE hundredXX_value; + + WHEN 'title' THEN + -- title means series title + -- SELECT INTO bibs COALESCE(ARRAY_AGG(mbsedm.source), ARRAY[]::BIGINT[]) + -- FROM metabib.browse_series_entry AS mbse + -- LEFT OUTER JOIN metabib.browse_series_entry_simple_heading_map AS mbseshm + -- ON mbse.id = mbseshm.entry + -- JOIN metabib.browse_series_entry_def_map AS mbsedm + -- ON mbse.id = mbsedm.entry + -- WHERE mbseshm.id IS NULL AND mbse.value ILIKE hundredXX_value; + + SELECT INTO bibs COALESCE(ARRAY_AGG(mbtedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_title_entry AS mbte + LEFT OUTER JOIN metabib.browse_title_entry_simple_heading_map AS mbteshm + ON mbte.id = mbteshm.entry + JOIN metabib.browse_title_entry_def_map AS mbtedm + ON mbte.id = mbtedm.entry + WHERE mbteshm.id IS NULL AND mbte.value ILIKE hundredXX_value; + + WHEN 'subject' THEN + SELECT INTO bibs COALESCE(ARRAY_AGG(mbsedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_subject_entry AS mbse + LEFT OUTER JOIN metabib.browse_subject_entry_simple_heading_map AS mbseshm + ON mbse.id = mbseshm.entry + JOIN metabib.browse_subject_entry_def_map AS mbsedm + ON mbse.id = mbsedm.entry + WHERE mbseshm.id IS NULL AND mbse.value ILIKE hundredXX_value; + + ELSE + + END CASE; + + RETURN bibs; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.unlinked_bibs_to_given_auth_text(text, text) + OWNER TO evergreen; + + +COMMIT; \ No newline at end of file diff --git a/KCLS/sql/browse/combined/013.kmain-1053-auth_n_bib_generator.sql b/KCLS/sql/browse/combined/013.kmain-1053-auth_n_bib_generator.sql new file mode 100644 index 0000000000..7e581531dc --- /dev/null +++ b/KCLS/sql/browse/combined/013.kmain-1053-auth_n_bib_generator.sql @@ -0,0 +1,119 @@ + +BEGIN; + +-- Type: public.auth_n_bib +-- This type will be used as the return for the stored procedure public.auth_n_bib_generator(auth_heading text) +-- DROP TYPE public.auth_n_bib; + +CREATE TYPE public.auth_n_bib AS + ( + primary_auth bigint, + auths bigint[], + bibs bigint[] + ); +ALTER TYPE public.auth_n_bib + OWNER TO evergreen; + + +-- Function: public.generate_bibs_associated_to_auth_heading(text) + +-- DROP FUNCTION public.generate_bibs_associated_to_auth_heading(text, text); + +CREATE OR REPLACE FUNCTION public.generate_bibs_associated_to_auth_heading(field text, auth_heading text) + RETURNS BIGINT[] AS +$BODY$ +DECLARE + temp_bibs BIGINT[] := '{}'; +BEGIN + -- ver 0.1 + + -- Add % to end to take into consideration additonal characters like '.' and date ($d). + -- This may get us more bibs then wanted but will a little more is better than missing some. 
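+ -- Illustration of the tag mapping in the CASE below (hypothetical heading):
+ -- field '100' with heading 'Austen, Jane' searches bib tags 100, 600, 700
+ -- and 800, subfield a, for values ILIKE 'Austen, Jane%', while field '150'
+ -- searches only tag 650.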
+ auth_heading := auth_heading || '%'; + + -- Get all bib record id's associated with the primary auth record + CASE field + WHEN '100' THEN + SELECT INTO temp_bibs COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) + FROM metabib.real_full_rec WHERE value ILIKE auth_heading AND subfield LIKE 'a' AND tag IN ('100', '600', '700', '800'); + WHEN '110' THEN + SELECT INTO temp_bibs COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) + FROM metabib.real_full_rec WHERE value ILIKE auth_heading AND subfield LIKE 'a' AND tag IN ('110', '610', '710', '810'); + WHEN '111' THEN + SELECT INTO temp_bibs COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) + FROM metabib.real_full_rec WHERE value ILIKE auth_heading AND subfield LIKE 'a' AND tag IN ('111', '611', '711', '811'); + WHEN '130' THEN + SELECT INTO temp_bibs COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) + FROM metabib.real_full_rec WHERE value ILIKE auth_heading AND subfield LIKE 'a' AND tag IN ('130', '630', '730', '830'); + WHEN '150' THEN + SELECT INTO temp_bibs COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) + FROM metabib.real_full_rec WHERE value ILIKE auth_heading AND subfield LIKE 'a' AND tag LIKE '650'; + WHEN '151' THEN + SELECT INTO temp_bibs COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) + FROM metabib.real_full_rec WHERE value ILIKE auth_heading AND subfield LIKE 'a' AND tag LIKE '651'; + WHEN '155' THEN + SELECT INTO temp_bibs COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) + FROM metabib.real_full_rec WHERE value ILIKE auth_heading AND subfield LIKE 'a' AND tag LIKE '655'; + ELSE + END CASE; + + RETURN temp_bibs; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION public.generate_bibs_associated_to_auth_heading(text, text) + OWNER TO evergreen; + + +-- Function: public.auth_n_bib_generator(text) + +-- DROP FUNCTION public.auth_n_bib_generator(text, text); + +CREATE OR REPLACE FUNCTION public.auth_n_bib_generator(auth_heading text) + RETURNS auth_n_bib AS +$BODY$ +DECLARE + auth_n_bib_data auth_n_bib; + temp_bibs BIGINT[] := '{}'; + temp_afr authority.full_rec%ROWTYPE; + sub_tag TEXT; +BEGIN + -- ver 0.3 + + -- Add % to end to take into consideration additonal characters like '.' and date ($d). + -- This may get us more bibs then wanted but will a little more is better than missing some. 
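+ -- Illustrative call (hypothetical heading):
+ --   SELECT * FROM public.auth_n_bib_generator('Twain, Mark');
+ -- returns one auth_n_bib row: the primary authority id, any 5xx-linked
+ -- authority ids, and the merged list of associated bib ids.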
+ auth_heading := auth_heading || '%'; + + -- Get the auth record full rec for the given auth heading + SELECT * INTO temp_afr FROM authority.full_rec WHERE value ILIKE auth_heading AND tag LIKE '1__' AND subfield LIKE 'a'; + + --------- PRIMARY AUTH RECORD --------- + -- Set the primary auth record id + auth_n_bib_data.primary_auth := temp_afr.record; + + -- Get all bib record id's associated with the primary auth record + SELECT * INTO auth_n_bib_data.bibs FROM public.generate_bibs_associated_to_auth_heading(temp_afr.tag, auth_heading); + + --------- LINKED AUTH RECORDS --------- + -- Loop for getting all linked auth record id's and their associated bib record id's + FOR temp_afr IN SELECT afr2.* FROM authority.full_rec AS afr1 JOIN authority.full_rec AS afr2 ON afr1.record = afr2.record WHERE afr1.value ILIKE auth_heading AND afr1.tag LIKE '5__' AND afr2.tag LIKE '1__' AND afr2.subfield LIKE 'a' LOOP + -- Add auth record id to the list + auth_n_bib_data.auths := array_append(auth_n_bib_data.auths, temp_afr.record); + + -- Add linked bib id's to the list + SELECT * INTO temp_bibs FROM public.generate_bibs_associated_to_auth_heading(temp_afr.tag, temp_afr.value); + SELECT INTO auth_n_bib_data.bibs ARRAY( SELECT unnest(auth_n_bib_data.bibs) AS e UNION SELECT unnest(temp_bibs) AS e ORDER BY e); + END LOOP; + + RETURN auth_n_bib_data; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION public.auth_n_bib_generator(text) + OWNER TO evergreen; + + +COMMIT; + diff --git a/KCLS/sql/browse/combined/014.kmain-1105-authority.unlinked_auths_to_given_auth_id.sql b/KCLS/sql/browse/combined/014.kmain-1105-authority.unlinked_auths_to_given_auth_id.sql new file mode 100644 index 0000000000..4de4ac2195 --- /dev/null +++ b/KCLS/sql/browse/combined/014.kmain-1105-authority.unlinked_auths_to_given_auth_id.sql @@ -0,0 +1,50 @@ +-- Function: authority.unlinked_auths_to_given_auth_id(bigint) + +-- DROP FUNCTION authority.unlinked_auths_to_given_auth_id(bigint); + +-- This stored procedure returns an array of auth id's that should link to a given auth record id. 
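+-- Sketch of the dynamic query the function builds, assuming the 1XX field of
+-- the given authority carries subfields a and d (hypothetical values):
+--
+--   SELECT COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) FROM (
+--     (SELECT record FROM authority.full_rec
+--       WHERE subfield = 'a' AND value = 'twain, mark' AND tag NOT LIKE '1__')
+--     INTERSECT
+--     (SELECT record FROM authority.full_rec
+--       WHERE subfield = 'd' AND value = '1835-1910' AND tag NOT LIKE '1__')
+--   ) AS X;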
+ +CREATE OR REPLACE FUNCTION authority.unlinked_auths_to_given_auth_id(auth_id bigint) + RETURNS bigint[] AS +$BODY$ +DECLARE + wheres TEXT[]; + single_where TEXT; + query TEXT; + results_cursor REFCURSOR; + auths BIGINT[]; + first INT := 1; + +BEGIN + -- ver 1.0 - KMAIN-1105 + + SELECT INTO wheres COALESCE(ARRAY_AGG('subfield = ''' || subfield || ''' AND value = ''' || value || ''''), ARRAY[]::TEXT[]) + FROM authority.full_rec WHERE record = auth_id AND tag LIKE '1__'; + + query := 'SELECT COALESCE(ARRAY_AGG(record), ARRAY[]::BIGINT[]) FROM ('; + + FOREACH single_where IN ARRAY wheres + LOOP + IF first < 1 THEN + -- not first subquery + query := query || ' INTERSECT (SELECT record FROM authority.full_rec WHERE ' || single_where || ' AND tag NOT LIKE ''1__'')'; + ELSE + -- first subquery + query := query || '(SELECT record FROM authority.full_rec WHERE ' || single_where || ' AND tag NOT LIKE ''1__'')'; + first := 0; + END IF; + END LOOP; + + query := query || ') AS X'; + + OPEN results_cursor FOR EXECUTE query; + + FETCH results_cursor INTO auths; + + RETURN auths; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.unlinked_auths_to_given_auth_id(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/combined/015.create_vandelay_queue_and_merge_profile.sql b/KCLS/sql/browse/combined/015.create_vandelay_queue_and_merge_profile.sql new file mode 100644 index 0000000000..d7a68e6c6b --- /dev/null +++ b/KCLS/sql/browse/combined/015.create_vandelay_queue_and_merge_profile.sql @@ -0,0 +1,7 @@ +BEGIN; + +INSERT INTO vandelay.bib_queue (owner, name, complete) VALUES (1, 'unupdated_bib_because_modified_since_export', false); + +INSERT INTO vandelay.merge_profile (owner, name, preserve_spec) VALUES (1, 'Backstage Preserve Import', '092,099,915,938'); + +COMMIT; diff --git a/KCLS/sql/browse/combined/016.kmain-1113-ongoing_generate_auth_ids_for_auth_to_auth_given_date.sql b/KCLS/sql/browse/combined/016.kmain-1113-ongoing_generate_auth_ids_for_auth_to_auth_given_date.sql new file mode 100644 index 0000000000..77a8e48f9a --- /dev/null +++ b/KCLS/sql/browse/combined/016.kmain-1113-ongoing_generate_auth_ids_for_auth_to_auth_given_date.sql @@ -0,0 +1,46 @@ +BEGIN; + +-- Function: authority.ongoing_generate_auth_ids_for_auth_to_auth_given_date(text) + +-- DROP FUNCTION authority.ongoing_generate_auth_ids_for_auth_to_auth_given_date(text); + +CREATE OR REPLACE FUNCTION authority.ongoing_generate_auth_ids_for_auth_to_auth_given_date(import_date text) + RETURNS SETOF BIGINT AS +$BODY$ +DECLARE + base_ids BIGINT[]; + base_one BIGINT; + base_query TEXT; + results_cursor REFCURSOR; + add_query TEXT; + auths BIGINT[]; + linked_auths BIGINT[]; + +BEGIN + -- ver 1.0 - KMAIN-1113 + + -- Generate a list of auth records that have been modified or created on the given day. 
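+ -- Illustrative call (hypothetical date): the import_date string is
+ -- interpolated as-is, with a fixed -07 offset on both bounds:
+ --   SELECT * FROM authority.ongoing_generate_auth_ids_for_auth_to_auth_given_date('2015-02-09');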
+ base_query := 'SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::BIGINT[]) FROM authority.record_entry WHERE edit_date BETWEEN ''' || import_date || ' 00:00:00-07'' AND ''' || import_date || ' 23:59:59-07'' AND NOT deleted'; + + OPEN results_cursor FOR EXECUTE base_query; + FETCH results_cursor INTO base_ids; + + auths := base_ids; + + FOREACH base_one IN ARRAY base_ids + LOOP + -- Find the auth id's that should link with the given auth id + SELECT * INTO linked_auths FROM authority.unlinked_auths_to_given_auth_id(base_one); + -- Combine newly found auth id's (linked_auths) to previously found ones (auths) + SELECT INTO auths ARRAY( SELECT unnest(auths) AS e UNION SELECT unnest(linked_auths) AS e ORDER BY e); + END LOOP; + + RETURN QUERY SELECT unnest(auths); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.ongoing_generate_auth_ids_for_auth_to_auth_given_date(text) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/017.kmain-1119-remove_duplicate_browse_entries.sql b/KCLS/sql/browse/combined/017.kmain-1119-remove_duplicate_browse_entries.sql new file mode 100644 index 0000000000..8ae88e6c31 --- /dev/null +++ b/KCLS/sql/browse/combined/017.kmain-1119-remove_duplicate_browse_entries.sql @@ -0,0 +1,440 @@ + +BEGIN; + +-- Function: metabib.triggered_reingest_for_auth_id(bigint) +-- DROP FUNCTION metabib.triggered_reingest_for_auth_id(bigint); +-- This function will reingest the auth record that is given by id. + +CREATE OR REPLACE FUNCTION metabib.triggered_reingest_for_auth_id(auth_id bigint) + RETURNS VOID AS +$BODY$ +DECLARE + +BEGIN + + UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.reingest.force_on_same_marc'; + + UPDATE authority.record_entry SET id = id WHERE id = auth_id; + + UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.reingest.force_on_same_marc'; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.triggered_reingest_for_auth_id(bigint) + OWNER TO evergreen; + + +-- Function: metabib.remove_duplicate_browse_entries() + +-- DROP FUNCTION metabib.remove_duplicate_browse_entries(); + +CREATE OR REPLACE FUNCTION metabib.remove_duplicate_browse_entries() + RETURNS TEXT[] AS +$BODY$ +DECLARE + bib_ids BIGINT[]; + bib_one BIGINT; + auth_ids BIGINT[]; + auth_one BIGINT; + be_ids BIGINT[]; + be_one BIGINT; + + result_message TEXT[]; + dup_count BIGINT; + +BEGIN + -- ver 1.0 - KMAIN-1119 + + ---------------------------------- AUTHOR - Duplicates in metabib.browse_author_entry ---------------------------------- + -- Generate an array of bib id's that point to a duplicate browse_entry + SELECT INTO bib_ids COALESCE(ARRAY_AGG(DISTINCT mbedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_author_entry AS mbe + JOIN metabib.browse_author_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_author_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the bib id's from array created previously + FOREACH bib_one IN ARRAY bib_ids + LOOP + PERFORM metabib.reingest_metabib_field_entries(bib_one, true, false, true); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_author_entry AS mbe + LEFT OUTER JOIN metabib.browse_author_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE 
mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_author_entry AS mbe + LEFT OUTER JOIN metabib.browse_author_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_author_entry WHERE id = be_one; + END LOOP; + + -- Generate a list of auth id's that point to a duplicate browse_entry + SELECT INTO auth_ids COALESCE(ARRAY_AGG(DISTINCT ash.record), ARRAY[]::BIGINT[]) + FROM metabib.browse_author_entry AS mbe + JOIN metabib.browse_author_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + JOIN authority.simple_heading AS ash + ON ash.id = mbeshm.simple_heading + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_author_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the auth id's from array created previously + FOREACH auth_one IN ARRAY auth_ids + LOOP + PERFORM metabib.triggered_reingest_for_auth_id(auth_one); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_author_entry AS mbe + LEFT OUTER JOIN metabib.browse_author_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_author_entry AS mbe + LEFT OUTER JOIN metabib.browse_author_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_author_entry WHERE id = be_one; + END LOOP; + + SELECT INTO dup_count COUNT(kyle.sort_value) + FROM (SELECT COUNT(id), sort_value FROM metabib.browse_author_entry + GROUP BY sort_value + HAVING COUNT(id) > 1) as kyle; + + result_message := array_append(result_message, 'browse_author_entry duplicates = ' || dup_count); + + + ---------------------------------- SERIES - Duplicates in metabib.browse_series_entry ---------------------------------- + -- Generate an array of bib id's that point to a duplicate browse_entry + SELECT INTO bib_ids COALESCE(ARRAY_AGG(DISTINCT mbedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_series_entry AS mbe + JOIN metabib.browse_series_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_series_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the bib id's from array created previously + FOREACH bib_one IN ARRAY bib_ids + LOOP + PERFORM metabib.reingest_metabib_field_entries(bib_one, true, false, true); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_series_entry AS mbe + LEFT OUTER JOIN metabib.browse_series_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_series_entry AS mbe + LEFT OUTER JOIN metabib.browse_series_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + 
DELETE FROM metabib.browse_series_entry WHERE id = be_one; + END LOOP; + + -- Generate a list of auth id's that point to a duplicate browse_entry + SELECT INTO auth_ids COALESCE(ARRAY_AGG(DISTINCT ash.record), ARRAY[]::BIGINT[]) + FROM metabib.browse_series_entry AS mbe + JOIN metabib.browse_series_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + JOIN authority.simple_heading AS ash + ON ash.id = mbeshm.simple_heading + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_series_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the auth id's from array created previously + FOREACH auth_one IN ARRAY auth_ids + LOOP + PERFORM metabib.triggered_reingest_for_auth_id(auth_one); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_series_entry AS mbe + LEFT OUTER JOIN metabib.browse_series_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_series_entry AS mbe + LEFT OUTER JOIN metabib.browse_series_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_series_entry WHERE id = be_one; + END LOOP; + + SELECT INTO dup_count COUNT(kyle.sort_value) + FROM (SELECT COUNT(id), sort_value FROM metabib.browse_series_entry + GROUP BY sort_value + HAVING COUNT(id) > 1) as kyle; + + result_message := array_append(result_message, 'browse_series_entry duplicates = ' || dup_count); + + + ---------------------------------- SUBJECT - Duplicates in metabib.browse_subject_entry ---------------------------------- + -- Generate an array of bib id's that point to a duplicate browse_entry + SELECT INTO bib_ids COALESCE(ARRAY_AGG(DISTINCT mbedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_subject_entry AS mbe + JOIN metabib.browse_subject_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_subject_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the bib id's from array created previously + FOREACH bib_one IN ARRAY bib_ids + LOOP + PERFORM metabib.reingest_metabib_field_entries(bib_one, true, false, true); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_subject_entry AS mbe + LEFT OUTER JOIN metabib.browse_subject_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_subject_entry AS mbe + LEFT OUTER JOIN metabib.browse_subject_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_subject_entry WHERE id = be_one; + END LOOP; + + -- Generate a list of auth id's that point to a duplicate browse_entry + SELECT INTO auth_ids COALESCE(ARRAY_AGG(DISTINCT ash.record), ARRAY[]::BIGINT[]) + FROM metabib.browse_subject_entry AS mbe + JOIN 
metabib.browse_subject_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + JOIN authority.simple_heading AS ash + ON ash.id = mbeshm.simple_heading + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_subject_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the auth id's from array created previously + FOREACH auth_one IN ARRAY auth_ids + LOOP + PERFORM metabib.triggered_reingest_for_auth_id(auth_one); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_subject_entry AS mbe + LEFT OUTER JOIN metabib.browse_subject_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_subject_entry AS mbe + LEFT OUTER JOIN metabib.browse_subject_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_subject_entry WHERE id = be_one; + END LOOP; + + SELECT INTO dup_count COUNT(kyle.sort_value) + FROM (SELECT COUNT(id), sort_value FROM metabib.browse_subject_entry + GROUP BY sort_value + HAVING COUNT(id) > 1) as kyle; + + result_message := array_append(result_message, 'browse_subject_entry duplicates = ' || dup_count); + + + ---------------------------------- TITLE - Duplicates in metabib.browse_title_entry ---------------------------------- + -- Generate an array of bib id's that point to a duplicate browse_entry + SELECT INTO bib_ids COALESCE(ARRAY_AGG(DISTINCT mbedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_title_entry AS mbe + JOIN metabib.browse_title_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_title_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the bib id's from array created previously + FOREACH bib_one IN ARRAY bib_ids + LOOP + PERFORM metabib.reingest_metabib_field_entries(bib_one, true, false, true); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_title_entry AS mbe + LEFT OUTER JOIN metabib.browse_title_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_title_entry AS mbe + LEFT OUTER JOIN metabib.browse_title_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_title_entry WHERE id = be_one; + END LOOP; + + -- Generate a list of auth id's that point to a duplicate browse_entry + SELECT INTO auth_ids COALESCE(ARRAY_AGG(DISTINCT ash.record), ARRAY[]::BIGINT[]) + FROM metabib.browse_title_entry AS mbe + JOIN metabib.browse_title_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + JOIN authority.simple_heading AS ash + ON ash.id = mbeshm.simple_heading + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_title_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the 
auth id's from array created previously + FOREACH auth_one IN ARRAY auth_ids + LOOP + PERFORM metabib.triggered_reingest_for_auth_id(auth_one); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_title_entry AS mbe + LEFT OUTER JOIN metabib.browse_title_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + INTERSECT + SELECT mbe.id FROM metabib.browse_title_entry AS mbe + LEFT OUTER JOIN metabib.browse_title_entry_simple_heading_map AS mbeshm + ON mbe.id = mbeshm.entry + WHERE mbeshm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_title_entry WHERE id = be_one; + END LOOP; + + SELECT INTO dup_count COUNT(kyle.sort_value) + FROM (SELECT COUNT(id), sort_value FROM metabib.browse_title_entry + GROUP BY sort_value + HAVING COUNT(id) > 1) as kyle; + + result_message := array_append(result_message, 'browse_title_entry duplicates = ' || dup_count); + + + ---------------------------------- CALL NUMBER - Duplicates in metabib.browse_call_number_entry ---------------------------------- + -- Generate an array of bib id's that point to a duplicate browse_entry + SELECT INTO bib_ids COALESCE(ARRAY_AGG(DISTINCT mbedm.source), ARRAY[]::BIGINT[]) + FROM metabib.browse_call_number_entry AS mbe + JOIN metabib.browse_call_number_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbe.sort_value IN ( + SELECT mbe.sort_value + FROM metabib.browse_call_number_entry AS mbe + GROUP BY mbe.sort_value HAVING COUNT (mbe.id) > 1); + + -- Reingest the bib id's from array created previously + FOREACH bib_one IN ARRAY bib_ids + LOOP + PERFORM metabib.reingest_metabib_field_entries(bib_one, true, false, true); + END LOOP; + + -- Remove browse_entries that are duplicate and nothing linked to them + -- Generate an array of browse_entry id's to be deleted + SELECT INTO be_ids COALESCE(ARRAY_AGG(dbe.id), ARRAY[]::BIGINT[]) + FROM ( + SELECT mbe.id FROM metabib.browse_call_number_entry AS mbe + LEFT OUTER JOIN metabib.browse_call_number_entry_def_map AS mbedm + ON mbe.id = mbedm.entry + WHERE mbedm.id IS NULL + ) AS dbe; + + -- Delete duplicate browse_entry id's that don't link + FOREACH be_one IN ARRAY be_ids + LOOP + DELETE FROM metabib.browse_call_number_entry WHERE id = be_one; + END LOOP; + + SELECT INTO dup_count COUNT(kyle.sort_value) + FROM (SELECT COUNT(id), sort_value FROM metabib.browse_call_number_entry + GROUP BY sort_value + HAVING COUNT(id) > 1) as kyle; + + result_message := array_append(result_message, 'browse_call_number_entry duplicates = ' || dup_count); + + + RETURN result_message; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.remove_duplicate_browse_entries() + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/018.kmain-1128-ui_improvements.sql b/KCLS/sql/browse/combined/018.kmain-1128-ui_improvements.sql new file mode 100644 index 0000000000..3309758ad2 --- /dev/null +++ b/KCLS/sql/browse/combined/018.kmain-1128-ui_improvements.sql @@ -0,0 +1,197 @@ +BEGIN; + +-- KMAIN-1128 UI Improvements + +-- create_and_populate-authority-display_5xx_headings.sql +--------------------------------------------------------------------------------------- +-- Create authority.display_5xx_headings table +CREATE TABLE 
authority.display_5xx_headings ( + id serial NOT NULL, + w_subfield_0 character(1), + w_subfield_1 character(1), + w_subfield_2 character(1), + w_subfield_3 character(1), + heading text, + CONSTRAINT reference_headings_pkey PRIMARY KEY (id) +) +WITH ( + OIDS=FALSE +); +ALTER TABLE authority.display_5xx_headings + OWNER TO evergreen; + +-- Add data to authority.display_5xx_headings +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES (NULL,NULL,NULL,NULL,'See Also'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('a',NULL,NULL,NULL,'Earlier Heading'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('b',NULL,NULL,NULL,'Later Heading'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('g',NULL,NULL,NULL,'Narrower Term'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('r',NULL,NULL,NULL,''); + + +-- authority-get_5xx_heading.sql +--------------------------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION authority.get_5xx_heading(w_subfield text) + RETURNS text AS +$BODY$ +-- Function takes a w subfield as input. It returns the heading to be displayed for the +-- reference. The headings are stored in authority.display_5xx_headings. +-- The current version only looks at the first position (0) of the w subfield. It then looks +-- in authority.display_5xx_headings for a match and returns that heading. If no match is found +-- it selects from authority.display_5xx_headings where w subfield position 0 is null, which is +-- the default 5xx heading. (See Also) +-- +-- The table is built to be expandable for the future and this function could be modified to check +-- more positions. +DECLARE + subfield_0 text; + return_heading text; +BEGIN + SELECT substring(w_subfield FROM '.') INTO subfield_0; + + SELECT heading INTO return_heading + FROM authority.display_5xx_headings + WHERE w_subfield_0 = subfield_0; + + IF return_heading IS NULL THEN + SELECT heading INTO return_heading + FROM authority.display_5xx_headings + WHERE w_subfield_0 IS NULL; + END IF; + + RETURN return_heading; +END; +$BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION authority.get_5xx_heading(w_subfield text) + OWNER TO evergreen; + + +-- metabib-get_browse_author_entry_marc_record.sql +------------------------------------------------------------------ +-- Function: metabib.get_browse_author_entry_marc_record(bigint) + +-- DROP FUNCTION metabib.get_browse_author_entry_marc_record(bigint); + +CREATE OR REPLACE FUNCTION metabib.get_browse_author_entry_marc_record(browse_entry bigint) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse author entry table and returns the marc for the +-- authority that control the browse author entry. 
+DECLARE + marc text; +BEGIN + SELECT are.marc INTO marc + FROM authority.record_entry are + JOIN authority.simple_heading ash ON are.id = ash.record + JOIN metabib.browse_author_entry_simple_heading_map mbaeshm ON ash.id = mbaeshm.simple_heading + JOIN metabib.browse_author_entry mbae ON mbaeshm.entry = mbae.id + JOIN authority.control_set_authority_field acsaf ON ash.atag = acsaf.id + WHERE mbae.id = browse_entry AND acsaf.tag ILIKE '1__'; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_author_entry_marc_record(bigint) + OWNER TO evergreen; + + +-- metabib-get_browse_subject_entry_marc_record.sql +--------------------------------------------------------------------------------------- +-- Function: metabib.get_browse_subject_entry_marc_record(bigint) + +-- DROP FUNCTION metabib.get_browse_subject_entry_marc_record(bigint); + +CREATE OR REPLACE FUNCTION metabib.get_browse_subject_entry_marc_record(browse_entry bigint) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse subject entry table and returns the marc for the +-- authority that control the browse subject entry. +DECLARE + marc text; +BEGIN + SELECT are.marc INTO marc + FROM authority.record_entry are + JOIN authority.simple_heading ash ON are.id = ash.record + JOIN metabib.browse_subject_entry_simple_heading_map mbseshm ON ash.id = mbseshm.simple_heading + JOIN metabib.browse_subject_entry mbse ON mbseshm.entry = mbse.id + JOIN authority.control_set_authority_field acsaf ON ash.atag = acsaf.id + WHERE mbse.id = browse_entry AND acsaf.tag ILIKE '1__'; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_subject_entry_marc_record(bigint) + OWNER TO evergreen; + + +-- metabib-get_browse_series_entry_marc_record.sql +------------------------------------------------------------------------------------------- +-- Function: metabib.get_browse_series_entry_marc_record(bigint) + +-- DROP FUNCTION metabib.get_browse_series_entry_marc_record(bigint); + +CREATE OR REPLACE FUNCTION metabib.get_browse_series_entry_marc_record(browse_entry bigint) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse series entry table and returns the marc for the +-- authority that control the browse series entry. +DECLARE + marc text; +BEGIN + SELECT are.marc INTO marc + FROM authority.record_entry are + JOIN authority.simple_heading ash ON are.id = ash.record + JOIN metabib.browse_series_entry_simple_heading_map mbseshm ON ash.id = mbseshm.simple_heading + JOIN metabib.browse_series_entry mbse ON mbseshm.entry = mbse.id + JOIN authority.control_set_authority_field acsaf ON ash.atag = acsaf.id + WHERE mbse.id = browse_entry AND acsaf.tag ILIKE '1__'; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_series_entry_marc_record(bigint) + OWNER TO evergreen; + + +-- metabib.get_browse_entry_marc_record.sql +-------------------------------------------------------------------------------------------- +CREATE OR REPLACE FUNCTION metabib.get_browse_entry_marc_record(browse_entry bigint, search_class text) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse entry series of tables and a type to indicate what +-- table to look into. It will then fetch the marc record for that the authority that +-- controls the browse entry. 
+DECLARE + marc text; +BEGIN + CASE search_class + WHEN 'author' THEN marc = metabib.get_browse_author_entry_marc_record(browse_entry); + WHEN 'subject' THEN marc = metabib.get_browse_subject_entry_marc_record(browse_entry); + WHEN 'series' THEN marc = metabib.get_browse_series_entry_marc_record(browse_entry); + ELSE marc = NULL; + END CASE; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_entry_marc_record(bigint, text) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/019.kmain-1175-metabib-triggered_reingest_for_bib_id.sql b/KCLS/sql/browse/combined/019.kmain-1175-metabib-triggered_reingest_for_bib_id.sql new file mode 100644 index 0000000000..0ac2c5178f --- /dev/null +++ b/KCLS/sql/browse/combined/019.kmain-1175-metabib-triggered_reingest_for_bib_id.sql @@ -0,0 +1,30 @@ +BEGIN; +-- Function: metabib.triggered_reingest_for_bib_id(bigint) + +-- DROP FUNCTION metabib.triggered_reingest_for_bib_id(bigint); + +CREATE OR REPLACE FUNCTION metabib.triggered_reingest_for_bib_id(bib_id bigint) + RETURNS bigint AS +$BODY$ +DECLARE + reingest_count BIGINT; +BEGIN + + UPDATE config.internal_flag SET enabled = TRUE WHERE name = 'ingest.reingest.force_on_same_marc'; + + reingest_count = 0; + + UPDATE biblio.record_entry SET id = id WHERE id = bib_id; + reingest_count = reingest_count + 1; + + UPDATE config.internal_flag SET enabled = FALSE WHERE name = 'ingest.reingest.force_on_same_marc'; + + RETURN reingest_count; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.triggered_reingest_for_bib_id(bigint) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/020.kmain-1160-update_config_xml_transform_for_name_kcls.sql b/KCLS/sql/browse/combined/020.kmain-1160-update_config_xml_transform_for_name_kcls.sql new file mode 100644 index 0000000000..825fb2372a --- /dev/null +++ b/KCLS/sql/browse/combined/020.kmain-1160-update_config_xml_transform_for_name_kcls.sql @@ -0,0 +1,1219 @@ +BEGIN; + +-- Update for KMAIN-1160 to add '--' between subject subdivisions +UPDATE config.xml_transform +SET xslt=$$ + + + + + + + + 650 + subject + topic + abcdvxyz + abcdvxyz + + + 651 + subject + geographic + avxyz + avxyz + + + 655 + subject + genre + abcvxyz + abcvxyz + + + 630 + subject + uniftitle + adfgklmnoprstvxyz + adfgklmnoprstvxyz + + + 600 + subject + name + abcdfgjklmnopqrstuvxyz + abcdfgjklmnopqrstuvxyz + + + 610 + subject + corpname + abcdfgklmnoprstuvxyz + abcdfgklmnoprstuvxyz + + + 611 + subject + meeting + acdefgjklnpqstuvxyz + acdefgjklnpqstuvxyz + + + 490 + series + seriestitle + a + a + + + 800 + series + seriestitle + tflmnoprs + tflmnoprs + + + 810 + series + seriestitle + tflmnoprs + tflmnoprs + + + 830 + series + seriestitle + adfgklmnoprst + adfgklmnoprst + + + 100 + author + personal + abcdq + abcdq + + + 110 + author + corporate + abcdn + ab + + + 111 + author + meeting + acdegng + acde + + + 700 + author + added_personal + abcdq + abcdq + + + 710 + author + corporate + ab + ab + + + 711 + author + meeting + acde + acde + + + 400 + author + added_personal + abcd + abcd + + + 410 + author + corporate + abcd + abcd + + + 411 + author + meeting + acdegq + acdega + + + 010 + identifier + lccn + a + + + + 010 + identifier + lccn + z + + + + 020 + identifier + isbn + a + + + + 020 + keyword + isbn + a + + + + 020 + identifier + isbn + z + + + + 020 + keyword + isbn + z + + + + 022 + identifier + issn + a + + + + 022 + keyword + issn + a + + + + 022 + identifier + issn + y + + + + 022 + 
keyword + issn + y + + + + 022 + identifier + issn + z + + + + 022 + keyword + issn + z + + + + 024 + identifier + upc + a + + + + 024 + keyword + upc + a + + + + 024 + identifier + upc + z + + + + 024 + keyword + upc + z + + + + 027 + identifier + tech_number + a + + + + 027 + keyword + tech_number + a + + + + 027 + identifier + tech_number + z + + + + 027 + keyword + tech_number + z + + + + 028 + identifier + tech_number + ab + + + + 028 + keyword + tech_number + ab + + + + 074 + identifier + sudoc + a + + + + 074 + keyword + govdoc + a + + + + 074 + identifier + sudoc + z + + + + 074 + keyword + govdoc + z + + + + 086 + identifier + sudoc + a + + + + 086 + keyword + govdoc + a + + + + 086 + identifier + sudoc + z + + + + 086 + keyword + govdoc + z + + + + 092 + identifier + bibcn + ab + + + + 099 + identifier + bibcn + a + + + + 100 + keyword + author + abcdq + + + + 110 + keyword + author + abcdn + + + + 111 + keyword + author + acdegnq + + + + 130 + keyword + title + abcefgijklmnopqrstuvwxyz + + + + 130 + title + uniform + abcefgijklmnopqrstuvwxyz + + + + 210 + keyword + title + abcefghijklmnopqrstuvwxyz + + + + 210 + title + abbreviated + abcefghijklmnopqrstuvwxyz + + + + 222 + title + magazine + a + + + + 240 + keyword + title + abcefgijklmnopqrstuvwxyz + + + + 240 + title + uniform + abcefgijklmnopqrstuvwxyz + + + + 245 + keyword + title + abefgijklmnopqrstuvwxyz + + + + 245 + title + proper + abefgijklmnopqrstuvwxyz + + + + 245 + keyword + author + c + + + + 245 + author + responsibility + c + + + + 246 + keyword + title + abcefgjklmnopqrstuvwxyz + + + + 246 + title + alternative + abcefgjklmnopqrstuvwxyz + + + + 247 + keyword + title + abcefgijklmnopqrstuvwxyz + + + + 247 + title + former + abcefgijklmnopqrstuvwxyz + + + + 260 + keyword + publisher + b + + + + 264 + keyword + publisher + b + + + + 400 + keyword + author + abcd + + + + 400 + keyword + title + ptv + + + + 400 + series + seriestitle + ptv + + + + 410 + author + corporate + abcde + + + + 410 + keyword + author + abcde + + + + 410 + keyword + title + ptv + + + + 410 + series + seriestitle + ptv + + + + 411 + author + conference + acdegq + + + + 411 + keyword + conference + acdegq + + + + 411 + keyword + title + ptv + + + + 411 + title + seriestitle + ptv + + + + 440 + keyword + title + abcefghijklmnopqrstuvwyz + + + + 440 + series + seriestitle + abcefghijklmnopqrstuvwyz + abcefghijklmnopqrstuvwyz + + + 490 + keyword + title + abcefghijklmnopqrstuvwyz + + + + 490 + series + seriestitle + abcefghijklmnopqrstuvwyz + + + + 490 + title + uniform + abcefghijklmnopqrstuvwyz + + + + 500 + keyword + notes + a + + + + 501 + keyword + notes + a + + + + 502 + keyword + notes + a + + + + 505 + keyword + notes + art + + + + 508 + keyword + notes + a + + + + 511 + keyword + notes + a + + + + 518 + keyword + notes + a + + + + 520 + keyword + notes + ab + + + + 522 + keyword + notes + a + + + + 533 + keyword + notes + af + + + + 534 + keyword + notes + af + + + + 545 + keyword + notes + ab + + + + 546 + keyword + notes + a + + + + 547 + keyword + notes + a + + + + 561 + keyword + notes + a + + + + 581 + keyword + notes + a + + + + 585 + keyword + notes + a + + + + 586 + keyword + notes + a + + + + 600 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 610 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 611 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 630 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 648 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 650 + keyword + subject + 
abcefghijklmnopqrstuvwxyz + + + + 651 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 655 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 656 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 657 + keyword + subject + abcefghijklmnopqrstuvwxyz + + + + 658 + keyword + fiction + abcefghijklmnopqrstuvwxyz + + + + 690 + keyword + fiction + a + + + + 691 + keyword + fiction + a + + + + 692 + keyword + fiction + acdefgh + + + + 693 + keyword + fiction + acdefg + + + + 694 + keyword + fiction + a + + + + 694 + series + seriestitle + a + + + + 695 + keyword + fiction + ab2 + + + + 696 + keyword + fiction + a + + + + 697 + keyword + fiction + ay + + + + 698 + keyword + fiction + a + + + + 699 + keyword + fiction + abc + + + + 700 + keyword + added_author + abcdq + + + + 700 + keyword + added_title + fgklmnoprst + + + + 700 + title + added + fgklmnoprst + + + + 710 + keyword + added_author + abcdn + + + + 710 + keyword + added_title + fgklmnoprst + + + + 710 + title + added + fgklmnoprst + + + + 711 + keyword + added_author + acdegnq + + + + 711 + keyword + added_title + fklnpst + + + + 711 + title + added + fklnpst + + + + 730 + keyword + added_title + abcefgijklmnopqrstuvwyz + + + + 730 + title + added + abcefgijklmnopqrstuvwyz + + + + 740 + keyword + added_title + abcefgijklmnopqrstuvwyz + + + + 740 + title + added + abcefgijklmnopqrstuvwyz + + + + 780 + keyword + previous_title + st + + + + 780 + title + previous + st + + + + 785 + keyword + succeeding_title + st + + + + 785 + title + succeeding + st + + + + 800 + author + personal_series + abcdq + + + + 800 + keyword + series_author + abcdq + + + + 800 + keyword + series_title + fgklmnoprst + + + + 800 + series + seriestitle + fgklmnoprst + fgklmnoprst + + + 810 + author + corporate_series + abcdn + + + + 810 + keyword + series_author + abcdn + + + + 810 + series + seriestitle + abcdn + abcdn + + + 811 + author + conference_series + acdegnq + + + + 811 + keyword + series_author + acdegnq + + + + 811 + series + seriestitle + fklnpstv + fklnpstv + + + 830 + keyword + series_title + abcefgijklmnopqrstuvwxyz + + + + 830 + series + seriestitle + abcefgijklmnopqrstuvwxyz + abcefgijklmnopqrstuvwxyz + + + 938 + identifier + match_isbn + a + + + + + + + + + + + + + + + + + + + + + + + + abcdefghijklmnopqrstuvwxyz + + + + + + + + + + + + + + + + abcdefghijklmnopqrstuvwxyz + + -- + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +$$ +WHERE name = 'kcls'; + +COMMIT; diff --git a/KCLS/sql/browse/combined/021.kmain-1213-update_authority_generate_overlay_template.sql b/KCLS/sql/browse/combined/021.kmain-1213-update_authority_generate_overlay_template.sql new file mode 100644 index 0000000000..cc6aeb0900 --- /dev/null +++ b/KCLS/sql/browse/combined/021.kmain-1213-update_authority_generate_overlay_template.sql @@ -0,0 +1,103 @@ +BEGIN; + +-- Function: authority.generate_overlay_template(text) + +-- DROP FUNCTION authority.generate_overlay_template(text); + +CREATE OR REPLACE FUNCTION authority.generate_overlay_template(source_xml text) + RETURNS text AS +$BODY$ +DECLARE + cset INT; + main_entry authority.control_set_authority_field%ROWTYPE; + bib_field authority.control_set_bib_field%ROWTYPE; + auth_id INT DEFAULT oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', source_xml)::INT; + tmp_data XML; + replace_data XML[] DEFAULT '{}'::XML[]; + replace_rules TEXT[] DEFAULT '{}'::TEXT[]; + auth_field XML[]; + auth_i1 TEXT; + auth_i2 TEXT; +BEGIN + IF auth_id IS NULL THEN + 
RETURN NULL; + END IF; + + -- Default to the LoC controll set + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + -- if none, make a best guess + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( + SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marc::XML)::TEXT[]) + FROM authority.record_entry + WHERE id = auth_id + ) + LIMIT 1; + END IF; + + -- if STILL none, no-op change + IF cset IS NULL THEN + RETURN XMLELEMENT( + name record, + XMLATTRIBUTES('http://www.loc.gov/MARC21/slim' AS xmlns), + XMLELEMENT( name leader, '00881nam a2200193 4500'), + XMLELEMENT( + name datafield, + XMLATTRIBUTES( '905' AS tag, ' ' AS ind1, ' ' AS ind2), + XMLELEMENT( + name subfield, + XMLATTRIBUTES('d' AS code), + '901c' + ) + ) + )::TEXT; + END IF; + + FOR main_entry IN SELECT * FROM authority.control_set_authority_field acsaf WHERE acsaf.control_set = cset AND acsaf.main_entry IS NULL LOOP + auth_field := XPATH('//*[@tag="'||main_entry.tag||'"][1]',source_xml::XML); + auth_i1 = (XPATH('@ind1',auth_field[1]))[1]; + auth_i2 = (XPATH('@ind2',auth_field[1]))[1]; + IF ARRAY_LENGTH(auth_field,1) > 0 THEN + FOR bib_field IN SELECT * FROM authority.control_set_bib_field WHERE authority_field = main_entry.id LOOP + SELECT XMLELEMENT( -- XMLAGG avoids magical creation, but requires unnest subquery + name datafield, + XMLATTRIBUTES(bib_field.tag AS tag, auth_i1 AS ind1, auth_i2 AS ind2), + XMLAGG(UNNEST) + ) INTO tmp_data FROM UNNEST(XPATH('//*[local-name()="subfield"]', auth_field[1])); + replace_data := replace_data || tmp_data; + replace_rules := replace_rules || ( bib_field.tag || main_entry.sf_list || E'[0~\\)' || auth_id || '$]' ); + tmp_data = NULL; + END LOOP; + EXIT; + END IF; + END LOOP; + + SELECT XMLAGG(UNNEST) INTO tmp_data FROM UNNEST(replace_data); + + RETURN XMLELEMENT( + name record, + XMLATTRIBUTES('http://www.loc.gov/MARC21/slim' AS xmlns), + XMLELEMENT( name leader, '00881nam a2200193 4500'), + tmp_data, + XMLELEMENT( + name datafield, + XMLATTRIBUTES( '905' AS tag, ' ' AS ind1, ' ' AS ind2), + XMLELEMENT( + name subfield, + XMLATTRIBUTES('r' AS code), + ARRAY_TO_STRING(replace_rules,',') + ) + ) + )::TEXT; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION authority.generate_overlay_template(text) + OWNER TO evergreen; + + +COMMIT; diff --git a/KCLS/sql/browse/combined/022.kmain-1199-fix-auth-to-auth-link.sql b/KCLS/sql/browse/combined/022.kmain-1199-fix-auth-to-auth-link.sql new file mode 100644 index 0000000000..2657cd12c7 --- /dev/null +++ b/KCLS/sql/browse/combined/022.kmain-1199-fix-auth-to-auth-link.sql @@ -0,0 +1,56 @@ +BEGIN; + +--add sort_value column to authority.full_rec +ALTER TABLE authority.full_rec + ADD COLUMN sort_value text; + +-- Function: authority.flatten_marc(bigint) +-- DROP FUNCTION authority.flatten_marc(bigint); +CREATE OR REPLACE FUNCTION authority.flatten_marc(rid bigint) + RETURNS SETOF authority.full_rec AS +$BODY$ +DECLARE + auth authority.record_entry%ROWTYPE; + output authority.full_rec%ROWTYPE; + field RECORD; +BEGIN + SELECT INTO auth * FROM authority.record_entry WHERE id = rid; + + FOR field IN SELECT * FROM vandelay.flatten_marc( auth.marc ) LOOP + output.record := rid; + output.ind1 := field.ind1; + output.ind2 := field.ind2; + output.tag := field.tag; + output.subfield := field.subfield; + output.value := field.value; + output.sort_value := naco_normalize(field.value); + + RETURN NEXT output; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql 
VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION authority.flatten_marc(bigint) + OWNER TO evergreen; + +-- Function: authority.reingest_authority_full_rec(bigint) +-- DROP FUNCTION authority.reingest_authority_full_rec(bigint); +CREATE OR REPLACE FUNCTION authority.reingest_authority_full_rec(auth_id bigint) + RETURNS void AS +$BODY$ +BEGIN + DELETE FROM authority.full_rec WHERE record = auth_id; + INSERT INTO authority.full_rec (record, tag, ind1, ind2, subfield, value, sort_value) + SELECT record, tag, ind1, ind2, subfield, value, sort_value FROM authority.flatten_marc( auth_id ); + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.reingest_authority_full_rec(bigint) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/browse/combined/023.kmain-1204-update-authority-control-set-bib-field-metabib-field-map.sql b/KCLS/sql/browse/combined/023.kmain-1204-update-authority-control-set-bib-field-metabib-field-map.sql new file mode 100644 index 0000000000..62917b4320 --- /dev/null +++ b/KCLS/sql/browse/combined/023.kmain-1204-update-authority-control-set-bib-field-metabib-field-map.sql @@ -0,0 +1,3 @@ +-- KMAIN-1204 Sql Update. Series Fix +UPDATE authority.control_set_bib_field_metabib_field_map SET metabib_field = 1 +WHERE metabib_field = 5; diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/README b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/README new file mode 100644 index 0000000000..6d636bee85 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/README @@ -0,0 +1,7 @@ +These speed improvements are the work of CMD ticket #60669. Summary of changes: + +* Alter statistics on all browse_*_entry tables on column sort_value: this helps the selectivity of queries called in functions. +* Add expression indexes to all browse_*_entry tables on column sort_value. +* Add function metabib.browse_table_bounds which queries postgres table statistics to find boundaries of region containing browse_term. +* Modify existing browse functions to be more selective in the WHERE clause, and align with indexes. 
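As a rough illustration of the approach summarized in the README above, metabib.browse_table_bounds leans on PostgreSQL's planner statistics: once the browse_*_entry tables have been ANALYZEd with a raised statistics target, pg_stats exposes a histogram of sort_value whose bucket boundaries can bracket a browse term. A minimal query in that spirit (an illustrative sketch only, assuming the statistics have been gathered):

-- Illustrative only: fetch the histogram bucket boundaries for sort_value.
SELECT histogram_bounds::text::text[] AS bounds
  FROM pg_stats
 WHERE tablename = 'browse_author_entry'
   AND attname = 'sort_value';

-- A caller can then locate the pair of adjacent boundaries that bracket the
-- normalized browse term and use them as BETWEEN limits, which is what
-- metabib.browse_table_bounds does, with extra padding derived from the
-- requested result_limit.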
+ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/deploy.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/deploy.sql new file mode 100644 index 0000000000..d6315087a0 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/deploy.sql @@ -0,0 +1,44 @@ +begin; +\set ON_ERROR_STOP on + +\i tables.sql +\i metabib.browse_table_bounds.sql + +\i metabib.browse_author_pivot.sql +\i metabib.browse_author_bib_pivot.sql +\i metabib.browse_author_authority_refs_pivot.sql + +\i metabib.browse_title_pivot.sql +\i metabib.browse_title_bib_pivot.sql +\i metabib.browse_title_authority_refs_pivot.sql + +\i metabib.browse_call_number_bib_pivot.sql +\i metabib.browse_call_number_authority_refs_pivot.sql +\i metabib.browse_call_number_pivot.sql + +\i metabib.browse_subject_pivot.sql +\i metabib.browse_subject_bib_pivot.sql +\i metabib.browse_subject_authority_refs_pivot.sql + +\i metabib.browse_series_pivot.sql +\i metabib.browse_series_bib_pivot.sql +\i metabib.browse_series_authority_refs_pivot.sql + +\i metabib.browse.sql + +--test + +select * from metabib.browse_table_bounds('author','twain',11); +select * from metabib.browse_table_bounds('author','twain',100); +select * from metabib.browse_table_bounds('author','twain',100000); +select * from metabib.browse_table_bounds('author',chr(1),11); +select * from metabib.browse_table_bounds('author',U&'\+02ffff\+02ffff',11); +select * from metabib.browse_table_bounds('call_number','621.757',11); +select * from metabib.browse_table_bounds('title','moby dick',11); +select * from metabib.browse_table_bounds('subject','zoology',11); +select * from metabib.browse_table_bounds('series','загадки истории',11); + +SELECT * FROM metabib.browse('author', 'twain', '1', NULL, 't', NULL, 11); + +commit; + diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse.sql new file mode 100644 index 0000000000..a0252c81be --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse.sql @@ -0,0 +1,190 @@ +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance + LANGUAGE plpgsql +AS $function$ +DECLARE + v_bound_lower TEXT; + v_bound_upper TEXT; + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + --ver1.1 updated with kmain-806 + -- Get search field int list with search_class + IF search_class = 'id|bibcn' THEN + + SELECT INTO search_class 'call_number'; + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; + IF pivot_id IS NULL THEN + + pivot_id := metabib.browse_call_number_pivot(browse_term); + + END IF; + ELSE + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. 
+ IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + + END CASE; + END IF; + END IF; + + CASE search_class + WHEN 'author' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = pivot_id; + WHEN 'title' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_title_entry WHERE id = pivot_id; + WHEN 'subject' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_subject_entry WHERE id = pivot_id; + WHEN 'series' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_series_entry WHERE id = pivot_id; + WHEN 'call_number' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = pivot_id; + + END CASE; + + --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + select bound_lower, bound_upper into v_bound_lower, v_bound_upper from metabib.browse_table_bounds(search_class,public.replace_ampersand(pivot_sort_value),result_limit); + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + +create temporary table tmp_metabib_browse +( + id bigint, + value text, + sort_value text, + --truncated_sort_value_noamp text, + value_noamp text +) on commit drop; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied. 
+ core_query := ' +insert into tmp_metabib_browse +( + id, + value, + sort_value, + --truncated_sort_value_noamp, + value_noamp +) +SELECT mbe.id, + mbe.value, + public.replace_ampersand(mbe.sort_value), + --public.replace_ampersand(mbe.truncated_sort_value), --this column is identical to sort_value + public.replace_ampersand(mbe.value) + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + )'; + IF search_class != 'call_number' THEN + + core_query := core_query || ' OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + )'; + + END IF; + core_query := core_query || $$ +) AND public.replace_ampersand(mbe.sort_value) between $$ || quote_literal(public.replace_ampersand(v_bound_lower)) || ' and ' || quote_literal(public.replace_ampersand(v_bound_upper)); + execute core_query; + -- This is the variant of the query for browsing backward. + back_query := $$select id, value, sort_value from tmp_metabib_browse +where sort_value <= $$ || quote_literal(public.replace_ampersand(pivot_sort_value)) || $$ +order by sort_value desc, value_noamp desc$$; + + -- This variant browses forward. + forward_query := $$select id, value, sort_value from tmp_metabib_browse +where sort_value > $$ || quote_literal(public.replace_ampersand(pivot_sort_value)) || $$ +order by sort_value, value_noamp$$; + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. 
+ RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION ALL + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_authority_refs_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_authority_refs_pivot.sql new file mode 100644 index 0000000000..ddb3ccc612 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_authority_refs_pivot.sql @@ -0,0 +1,18 @@ +CREATE OR REPLACE FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbae.id + FROM metabib.browse_author_entry mbae + JOIN metabib.browse_author_entry_simple_heading_map mbaeshm ON ( mbaeshm.entry = mbae.id ) + JOIN authority.simple_heading ash ON ( mbaeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbae.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbae.sort_value) <= (select bound_upper from metabib.browse_table_bounds('author',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbae.sort_value), public.replace_ampersand(mbae.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_bib_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_bib_pivot.sql new file mode 100644 index 0000000000..78d39d02f9 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_bib_pivot.sql @@ -0,0 +1,16 @@ +CREATE OR REPLACE FUNCTION metabib.browse_author_bib_pivot(integer[], text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbe.id + FROM metabib.browse_author_entry mbe + JOIN metabib.browse_author_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbe.sort_value) <= (select bound_upper from metabib.browse_table_bounds('author',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_pivot.sql new file mode 100644 index 0000000000..85a23a26d5 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_author_pivot.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION metabib.browse_author_pivot(search_field integer[], browse_term text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver1.0 + SELECT id FROM 
metabib.browse_author_entry + WHERE id IN ( + metabib.browse_author_bib_pivot(search_field, browse_term), + metabib.browse_author_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY replace_ampersand(sort_value), value LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_authority_refs_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_authority_refs_pivot.sql new file mode 100644 index 0000000000..b6d3183d19 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_authority_refs_pivot.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION metabib.browse_call_number_authority_refs_pivot(text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or browse term + --reference to public.replace_ampersand has no function except to maintain compatibility with indexes expected by calling functions. + SELECT mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE public.replace_ampersand(mbe.sort_value) >= public.naco_normalize_keep_decimal($1, '') + and public.replace_ampersand(mbe.sort_value) <= (select bound_upper from metabib.browse_table_bounds('call_number',public.naco_normalize_keep_decimal($1, ''),1)) + ORDER BY public.replace_ampersand(mbe.sort_value), mbe.value LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_bib_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_bib_pivot.sql new file mode 100644 index 0000000000..f5b72a40a0 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_bib_pivot.sql @@ -0,0 +1,12 @@ +CREATE OR REPLACE FUNCTION metabib.browse_call_number_bib_pivot(text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or browse term + SELECT mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE replace_ampersand(mbe.sort_value) >= public.naco_normalize_keep_decimal($1, '') + and replace_ampersand(mbe.sort_value) <= (select bound_upper from metabib.browse_table_bounds('call_number',public.naco_normalize_keep_decimal($1, ''),1)) + ORDER BY replace_ampersand(mbe.sort_value), mbe.value LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_pivot.sql new file mode 100644 index 0000000000..5aff1109f4 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_call_number_pivot.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION metabib.browse_call_number_pivot(browse_term text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver1.0 + SELECT id FROM metabib.browse_call_number_entry + WHERE id IN ( + metabib.browse_call_number_bib_pivot(browse_term), + metabib.browse_call_number_authority_refs_pivot(browse_term) + ) + ORDER BY replace_ampersand(sort_value), value LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_authority_refs_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_authority_refs_pivot.sql new file mode 100644 index 0000000000..728f2bd2ef --- /dev/null +++ 
b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_authority_refs_pivot.sql @@ -0,0 +1,18 @@ +CREATE OR REPLACE FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbse.id + FROM metabib.browse_series_entry mbse + JOIN metabib.browse_series_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbse.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbse.sort_value) <= (select bound_upper from metabib.browse_table_bounds('series',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbse.sort_value), public.replace_ampersand(mbse.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_bib_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_bib_pivot.sql new file mode 100644 index 0000000000..9e7191e5f4 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_bib_pivot.sql @@ -0,0 +1,16 @@ +CREATE OR REPLACE FUNCTION metabib.browse_series_bib_pivot(integer[], text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbe.id + FROM metabib.browse_series_entry mbe + JOIN metabib.browse_series_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbe.sort_value) <= (select bound_upper from metabib.browse_table_bounds('series',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_pivot.sql new file mode 100644 index 0000000000..e1233fdf61 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_series_pivot.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION metabib.browse_series_pivot(search_field integer[], browse_term text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver1.0 + SELECT id FROM metabib.browse_series_entry + WHERE id IN ( + metabib.browse_series_bib_pivot(search_field, browse_term), + metabib.browse_series_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY replace_ampersand(sort_value), value LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_authority_refs_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_authority_refs_pivot.sql new file mode 100644 index 0000000000..89f965cebe --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_authority_refs_pivot.sql @@ -0,0 +1,18 @@ +CREATE OR REPLACE FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + RETURNS 
bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbse.id + FROM metabib.browse_subject_entry mbse + JOIN metabib.browse_subject_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbse.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbse.sort_value) <= (select bound_upper from metabib.browse_table_bounds('subject',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbse.sort_value), public.replace_ampersand(mbse.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_bib_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_bib_pivot.sql new file mode 100644 index 0000000000..c4b536f32f --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_bib_pivot.sql @@ -0,0 +1,16 @@ +CREATE OR REPLACE FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbe.id + FROM metabib.browse_subject_entry mbe + JOIN metabib.browse_subject_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbe.sort_value) <= (select bound_upper from metabib.browse_table_bounds('subject',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_pivot.sql new file mode 100644 index 0000000000..21531ea459 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_subject_pivot.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION metabib.browse_subject_pivot(search_field integer[], browse_term text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver1.0 + SELECT id FROM metabib.browse_subject_entry + WHERE id IN ( + metabib.browse_subject_bib_pivot(search_field, browse_term), + metabib.browse_subject_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY replace_ampersand(sort_value), value LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_table_bounds.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_table_bounds.sql new file mode 100644 index 0000000000..aade21947a --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_table_bounds.sql @@ -0,0 +1,56 @@ +create or replace function metabib.browse_table_bounds( + search_class text, + browse_term text, + result_limit integer, + OUT bound_lower text, + OUT bound_upper text +) + RETURNS record AS +$BODY$ +declare + histogram text[]; + table_name text; + table_rows real; + bucket_width real; + padding int; + CHAR_MIN constant text := chr(1); + 
CHAR_MAX constant text := U&'\+02ffff\+02ffff'; --http://en.wikipedia.org/wiki/Unicode +begin + if search_class not in ('author','title','subject','series','call_number') then + raise 'Invalid value for argument search_class.'; + else + table_name := 'browse_' || search_class || '_entry'; + end if; + select reltuples into table_rows from pg_class where relname = table_name + ; + select histogram_bounds::text::text[] into histogram + from pg_stats s + where tablename = table_name and attname = 'sort_value' + ; + bucket_width := table_rows / array_length(histogram,1); + if bucket_width < (result_limit * 2) then + bound_lower := CHAR_MIN; + bound_upper := CHAR_MAX; + else + padding := (result_limit / bucket_width)::int + 1; + select lag2, lead2 into bound_lower, bound_upper from + ( + with bounds as (select unnest(histogram) as boundary) + select + lag(boundary,padding + 1,CHAR_MIN) over (order by boundary) as lag2, + lag(boundary,padding,CHAR_MIN) over (order by boundary) as lag1, + boundary as lead1, + lead(boundary,padding,CHAR_MAX) over (order by boundary) as lead2 + from bounds + ) sub1 + where browse_term between lag1 and lead1; + end if; + return; +end; +$BODY$ + LANGUAGE plpgsql STABLE + STRICT + COST 100; +ALTER FUNCTION metabib.browse_table_bounds(text, text, integer) OWNER TO evergreen; +comment on function metabib.browse_table_bounds(text, text, integer) is 'Query postgres table statistics to find boundaries of region containing browse_term.'; + diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_authority_refs_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_authority_refs_pivot.sql new file mode 100644 index 0000000000..273cd27bb2 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_authority_refs_pivot.sql @@ -0,0 +1,18 @@ +CREATE OR REPLACE FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver2.0 updated with kmain-762: wrapped values in public.replace_ampersand function + SELECT mbte.id + FROM metabib.browse_title_entry mbte + JOIN metabib.browse_title_entry_simple_heading_map mbteshm ON ( mbteshm.entry = mbte.id ) + JOIN authority.simple_heading ash ON ( mbteshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbte.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbte.sort_value) <= (select bound_upper from metabib.browse_table_bounds('title',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbte.sort_value), public.replace_ampersand(mbte.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_bib_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_bib_pivot.sql new file mode 100644 index 0000000000..647cf95b17 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_bib_pivot.sql @@ -0,0 +1,16 @@ +CREATE OR REPLACE FUNCTION metabib.browse_title_bib_pivot(integer[], text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_title_entry mbe + JOIN metabib.browse_title_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE 
public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + and public.replace_ampersand(mbe.sort_value) <= (select bound_upper from metabib.browse_table_bounds('title',public.replace_ampersand(public.naco_normalize($2)),1)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_pivot.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_pivot.sql new file mode 100644 index 0000000000..b1b2b052b8 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/metabib.browse_title_pivot.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION metabib.browse_title_pivot(search_field integer[], browse_term text) + RETURNS bigint + LANGUAGE sql + STABLE +AS $function$ + --ver1.0 + SELECT id FROM metabib.browse_title_entry + WHERE id IN ( + metabib.browse_title_bib_pivot(search_field, browse_term), + metabib.browse_title_authority_refs_pivot(search_field,browse_term) + ) + ORDER BY replace_ampersand(sort_value), value LIMIT 1; +$function$ diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/tables.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/tables.sql new file mode 100644 index 0000000000..5ad1066127 --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/tables.sql @@ -0,0 +1,32 @@ +drop index if exists metabib.browse_author_entry_sort_value_noamp; +drop index if exists metabib.browse_title_entry_sort_value_noamp; +drop index if exists metabib.browse_subject_entry_sort_value_noamp; +drop index if exists metabib.browse_series_entry_sort_value_noamp; +drop index if exists metabib.browse_call_number_entry_sort_value_noamp; + +drop index if exists metabib.browse_author_entry_truncated_sort_value_noamp; +drop index if exists metabib.browse_title_entry_truncated_sort_value_noamp; +drop index if exists metabib.browse_subject_entry_truncated_sort_value_noamp; +drop index if exists metabib.browse_series_entry_truncated_sort_value_noamp; +drop index if exists metabib.browse_call_number_entry_truncated_sort_value_noamp; + +alter table metabib.browse_author_entry alter column sort_value set statistics 1000; +alter table metabib.browse_title_entry alter column sort_value set statistics 1000; +alter table metabib.browse_subject_entry alter column sort_value set statistics 1000; +alter table metabib.browse_series_entry alter column sort_value set statistics 1000; +alter table metabib.browse_call_number_entry alter column sort_value set statistics 1000; + +analyze metabib.browse_author_entry; +analyze metabib.browse_title_entry; +analyze metabib.browse_subject_entry; +analyze metabib.browse_series_entry; +analyze metabib.browse_call_number_entry; + +create index browse_author_entry_sort_value_noamp on metabib.browse_author_entry ((public.replace_ampersand(sort_value))); +create index browse_title_entry_sort_value_noamp on metabib.browse_title_entry ((public.replace_ampersand(sort_value))); +create index browse_subject_entry_sort_value_noamp on metabib.browse_subject_entry ((public.replace_ampersand(sort_value))); +create index browse_series_entry_sort_value_noamp on metabib.browse_series_entry ((public.replace_ampersand(sort_value))); +create index browse_call_number_entry_sort_value_noamp on metabib.browse_call_number_entry ((public.replace_ampersand(sort_value))); + +create index authority_full_rec_sort_value_index on authority.full_rec 
(sort_value); + diff --git a/KCLS/sql/browse/combined/browse_speed_improvements_CMD/test.sql b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/test.sql new file mode 100644 index 0000000000..a605295b7c --- /dev/null +++ b/KCLS/sql/browse/combined/browse_speed_improvements_CMD/test.sql @@ -0,0 +1,14 @@ +\timing on +select metabib.browse_author_bib_pivot('{118,10,9,236,7,235,205,8,232,217}', 'abe'); +select metabib.browse_author_pivot('{118,10,9,236,7,235,205,8,232,217}', 'abe'); +SELECT * FROM metabib.browse('author', 'twain', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('author', '+', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('author', 'abe', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('author', 'zamora', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('author', 'zzzz', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('author', 'Αἰσχύλος', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('id|bibcn', '621.757', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('title', 'moby dick', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('subject', 'zoology', '1', NULL, 't', NULL, 11); +SELECT * FROM metabib.browse('series', 'загадки истории', '1', NULL, 't', NULL, 11); + diff --git a/KCLS/sql/browse/combined/kmain212.testsForSQL b/KCLS/sql/browse/combined/kmain212.testsForSQL new file mode 100644 index 0000000000..9a0b9a26a8 --- /dev/null +++ b/KCLS/sql/browse/combined/kmain212.testsForSQL @@ -0,0 +1,146 @@ +SELECT metabib.browse('author','j'); +SELECT metabib.browse('id|bibcn','j'); + +--search_field +SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'author'; -- {10,8,7,205,217,232,235,236,9,118} +SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; -- {25} + +--pivot_id +SELECT metabib.browse_author_pivot((SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'author'), 'j'); -- 1908016 + +SELECT metabib.browse_call_number_pivot((SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'), 'j'); -- 5 + +--pivot_sort_value, pivot_sort_fallback +SELECT truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = 1908016; -- "j eric", "J., Eric." + +SELECT truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = 5; -- "j rowling", "J ROWLING" + +SELECT quote_literal('{10,8,7,205,217,232,235,236,9,118}') +SELECT cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[]) + +--forward_query author +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_author_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_author_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? 
+ SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal('j eric') ORDER BY mbe.sort_value, mbe.value --LOTS! + +--forward_query call_number / back doesn't get anything +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_call_number_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_call_number_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast('{25}' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast('{25}' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal('j') ORDER BY mbe.sort_value, mbe.value --5,6 + +--staged browse author +SELECT * FROM metabib.staged_browse( + 'SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_author_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_author_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast(''{10,8,7,205,217,232,235,236,9,118}'' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast(''{10,8,7,205,217,232,235,236,9,118}'' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal(''j eric'') ORDER BY mbe.sort_value, mbe.value', + cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[]), + null, null, FALSE, 100, FALSE, 5, 9, + 'author' + ) ORDER BY row_number DESC; + +--staged browse call_number +SELECT * FROM metabib.staged_browse( + 'SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_call_number_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_call_number_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast(''{25}'' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? 
+ SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast(''{25}'' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal(''j'') ORDER BY mbe.sort_value, mbe.value', + cast('{25}' AS integer[]), + null, null, FALSE, 100, FALSE, 5, 9, + 'call_number' + ) ORDER BY row_number DESC; + + + +-- gather aggregate +SELECT + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_author_entry_def_map + WHERE entry = 2087343 + AND def = ANY(cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[]));--"{959591}" + +SELECT + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_call_number_entry_def_map + WHERE entry = 6 + AND def = ANY(cast('{25}' AS integer[]));--"{132163}" \ No newline at end of file diff --git a/KCLS/sql/browse/combined/reingest/data_update_driver.html b/KCLS/sql/browse/combined/reingest/data_update_driver.html new file mode 100644 index 0000000000..59171ab0bb --- /dev/null +++ b/KCLS/sql/browse/combined/reingest/data_update_driver.html @@ -0,0 +1,449 @@ + + + + +data_update_driver.pl + + + + + + + + + + + +

+

+
+

Name

+

data_update_driver.pl

+

+

+
+

Description

+

The purpose of this script is to take SQL statements that act on a lot of data (e.g., long UPDATEs) and break them into reasonably sized chunks. The first partition is by the number of processes you want to run. Its size is the total number of keys in the table divided by the number of processes.

+

The second partition is the number of rows you want to process at a time within a first partition. For instance, the number of keys and processes may create a first partition of 10,000. Within that, you might want to process 500 rows at a time.

+

The script will start a new child process for each of the first partitions and, within those, will process n rows at a time, with n and the number of processes both configurable.

+

+

+
+

Modules

+

Modules used and what for.

+

Note: to load a new module into your local system, execute

+
+        perl -MCPAN -e shell
+        cpan> Module::Name
+        cpan> quit
+
+
DBI
+ +
+

All interactions with the database and SQL.

+
+
DBD::Pg
+ +
+

Database driver for PostgreSQL.

+
+
Parallel::ForkManager
+ +
+

Runs up to n processes in parallel; n is set by --max-processes.

+
+
File::Basename
+ +
+

Provides basename() to strip the path and extension from a file name.

+
+
Getopt::Long
+ +
+

Retrieve long command line options (e.g. --xxx) and do simple validation.

+
+
Term::ReadKey
+ +
+

Used to hide console echoing while typing in passwords.

+
+
Pod::Usage
+ +
+

Used to display usage messages that are pulled from the program's POD.

+
+
+

+

+
+

Usage

+
+        data_update_driver.pl [--dir <directory>] [--finished-dir <dir>]
+                [--database <db_name>] [--host <host_name>] [--port <port>] 
+                [--user <user_name>] [--password <password>] 
+                [--exit-on-error] [--max-processes <number>] 
+                [--rows <number>]
+
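For example, an invocation along the lines of the one in runReingestSh.sh (added later in this commit) looks like this; the host name below is a placeholder, and omitting --password makes the script prompt for it:

        perl data_update_driver.pl --dir reingest/ --finished-dir reingest/fin \
                --database evergreen --host db.example.org --port 5432 \
                --user evergreen --max-processes 4 --rows 2500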

+

+
+

Arguments

+
+
dir
+ +
+

The directory containing the update scripts. It defaults to the current directory.

+
+
finished-dir
+ +
+

The directory where the scripts are moved to when they finish correctly. It defaults to a folder called 'finished'.

+
+
database
+ +
+

The database name.

+
+
host
+ +
+

The host name or IP address for the database.

+
+
user
+ +
+

The user name for the database.

+
+
password
+ +
+

The password for the database. If none is provided, it will ask for one.

+
+
exit-on-error
+ +
+

Flag: should the script stop when it encounters an SQL error? If not, it will continue processing. Regardless, an error file is created with the extension '.err' that holds the error message.

+
+
max-processes
+ +
+

Number: what is the maximum number of processes to run in parallel? The default is six.

+
+
rows
+ +
+

The number of rows the SQL script should process at a time. The default is 500.

+
+
help
+ +
+

Display usage and exit.

+
+
+

+

+
+

Pre-loop

+

Get command line options. Get the password if none is supplied. Exit if dir does not exist. If finished_dir does not exist, create it. Set up the database parameters and test that they connect correctly. If exit_on_error is set, register a callback for the parent process so it will die if a child process returns an error code. Remove all error files left over from a previous run.

+
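The exit-on-error behaviour, for example, is wired up through Parallel::ForkManager's run_on_finish callback (excerpted, slightly trimmed, from the script added below):

        if ($exit_on_error) {
            $pm->run_on_finish( sub {
                my ($pid, $exit_code, $ident) = @_;
                # a child exits with 1 when its SQL failed
                die "Child process encountered an error in the SQL\n"
                    if $exit_code == 1;
            });
        }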

+

+
+

Main

+

Roll through each data file in dir that ends with _data.sql. Read the file line by line. The first comment (--) is the script's description. The first line that begins "SELECT MAX" is considered the id min and max SQL, all on one line. The id ranges are determined by executing this SQL. The partition size is determined by dividing the total number of keys by max-processes.

+

This script is responsible for creating its own wrapper function to be called with a start id and end id, if needed. When the script encounters a data line that begins "CREATE [OR REPLACE] FUNCTION", it will start collecting lines. It will stop when it finds a line that starts with "$$ LANGUAGE". All of the lines between these two, inclusive, are the create-wrapper-function script. The wrapper function should call the update function, passing the starting and ending ID. You may not need a wrapper function if you are calling an UPDATE directly.

+

The data line that starts with "DROP FUNCTION" is considered SQL to drop the wrapper function, if needed. It should be one line only. You do not need this SQL if you are not using the CREATE FUNCTION SQL.

+

If a data line starts with "ALTER TABLE", it is considered the enable/disable triggers SQL statement. All triggers are disabled before running the updates and enabled afterward.

+

After all of the above lines are removed from consideration, what remains is the actual SQL update. It is often just a SELECT statement that calls the wrapper function with the placeholders "~start_id~" and "~end_id~". This script will replace them with the values it calculates.

+

Comments and blank lines are ignored.

+

Then, start a loop over the first partitions and start a child process for each one. Within each partition, execute the SQL on only rows rows at a time. This is done by setting the starting and ending ID. Since IDs aren't always sequential, fewer than rows rows may actually be updated.

+
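A minimal sketch of that chunking, simplified from the script's main loop; the id range, process count, and SQL below are illustrative only:

        my ($min_id, $max_id) = (1, 1_000_000);         # from the "SELECT MAX" line
        my $count     = $max_id - $min_id;
        my $part_size = int($count / 4);                 # one first partition per process
        my $sql = 'SELECT schema.wrapper_function(~start_id~, ~end_id~);';

        for (my $part = 0; $part < $count; $part += $part_size) {
            # in the real script, each first partition runs in a forked child
            for (my $start_id = $part; $start_id < $part + $part_size; $start_id += 500) {
                (my $exec_sql = $sql) =~ s/~start_id~/$start_id/;
                my $end_id = $start_id + 500;
                $exec_sql =~ s/~end_id~/$end_id/;
                # $exec_sql now reads e.g. "SELECT schema.wrapper_function(0, 500);"
            }
        }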

After all partitions execute, the script file is moved to the finished_dir folder. Script files that encountered errors stay in dir with their error files. Triggers are re-enabled.

+

A sample data file might look like this:

+
+        SELECT MAX(id), MIN(id) from schema.some_table;
+        
+        ALTER TABLE schema.some_table DISABLE TRIGGER ALL;
+        
+        UPDATE schema.some_table SET col_name = something 
+        WHERE id >= ~start_id~ AND id < ~end_id~;
+

A sample file that updates using a standard function in Evergreen might look like this:

+
+        CREATE OR REPLACE FUNCTION schema.wrapper_function(start_id BIGINT, end_id BIGINT) 
+        RETURNS void AS $$
+        DECLARE
+                rec RECORD;
+        BEGIN
+                FOR rec IN SELECT id, some_col FROM schema.table_to_update WHERE id >= start_id AND id < end_id 
+                LOOP
+                        PERFORM schema.update_function( rec.id, rec.some_col );
+                END LOOP;
+        END;
+        $$ LANGUAGE plpgsql;
+        
+        DROP FUNCTION IF EXISTS schema.wrapper_function(BIGINT, BIGINT);
+        
+        SELECT MAX(id), MIN(id) from schema.table_to_update;
+        
+        ALTER TABLE schema.some_table DISABLE TRIGGER ALL;
+        
+        SELECT schema.wrapper_function(~start_id~, ~end_id~);
+

+

+
+

Subroutines

+

+

+

get_db_handle

+

Get a database handle

+
+
Parameters
+ +
+

$db_params - reference to several DB parameters

+
+
Returns
+ +
+

$dbh - database handle or zero

+
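A minimal usage sketch, assuming a $db_params hash like the one built in the pre-loop:

        my $dbh = get_db_handle( $db_params )
            or die "Could not connect to the database\n";
        # ... run statements with $dbh ...
        $dbh->disconnect;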
+
+

+

+

run_sql

+

Execute a non-SELECT SQL statement and capture any error output

+
+
Parameters
+ +
+

$db_params - the DB parameters (ref to hash)

+

$sql - the SQL statement

+

$file - the file name

+

$desc - a description of the task (first comment)

+

$start_id - the starting ID when the error occurred or zero

+
+
Returns
+ +
+

1 = success, 0 = failure

+
+
Side Effects
+ +
+

Creates a file with the extension .err if there is an error executing the SQL

+
+
+

+

+

run_select_sql

+

Execute a SELECT SQL statement and fetch one row of column values

+
+
Parameters
+ +
+

$db_params - the DB parameters (ref to hash)

+

$sql - the SQL statement

+

$file - the data input file name

+

$desc - a description of the task (first comment)

+

$start_id - the starting ID when the error occurred or zero

+
+
Returns
+ +
+

An array of column values in list context, or a reference to the array in scalar context

+
+
Side Effects
+ +
+

Creates a file with the extension .err if there is an error executing the SQL

+
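The main loop, for instance, calls it in list context to fetch the id range:

        my ($max_id, $min_id) = run_select_sql( $db_params, $range_sql, $file, $desc );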
+
+

+

+

get_error_file

+

Create and open an error file. Put in a timestamp. The error file name is the file name with the extension .err.

+
+
Parameters
+ +
+

$file - the file name to create the error file for

+

$start_id - the starting ID when the error occurred or zero

+
+
Returns
+ +
+

An array in list context; a reference to an array in scalar context

+

[0] $fail_fh - the file handle of the opened error file

+

[1] $error_file - the error file name

+
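Callers use it in list context, as run_sql does:

        my ( $fail_fh, $error_file ) = get_error_file( $file, $start_id );
        print $fail_fh "Can't execute SQL statement!\n";
        close $fail_fh or warn "Could not close $error_file\n$!\n";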
+
+

+

+

print_time

+

Print time elapsed in hours, minutes, and seconds

+
+
Parameters
+ +
+

$start - the start time, taken from the time() function

+
+
Side Effects
+ +
+

Prints the elapsed time to standard output

+
+
+

+

+

create_helper_func

+

Create any helper functions needed by the update. The input data file should contain a commented DROP statement that will drop the function when it is no longer needed. For example:

+
+        -- DROP FUNCTION IF EXISTS schema.some_function(BIGINT, TEXT)
+        
+        CREATE OR REPLACE FUNCTION schema.some_function(id BIGINT, marc TEXT)
+        ...
+
+
Parameters
+ +
+

$dir - the source directory for the input data files

+

$db_params - a hash reference to the DB parameters

+
+
Returns
+ +
+

In list context, an array of SQL DROP statements that will remove the helper functions at the end of the update. In scalar context, a reference to that array.

+
+
Side Effects
+ +
+

An error file is created if an error is encountered.

+
+
+

+

+

able_all_triggers

+

Enable/Disable triggers on a table. The SQL is pulled from the input data file line that begins "ALTER TABLE".

+
+
Parameters
+ +
+

$able - The word ENABLE or DISABLE, depending on what you want to do to the triggers. Defaults to DISABLE.

+

$able_trigger - The ALTER TABLE ... TRIGGER statement, previously extracted from the input data file.

+

$file - The name of the input data file.

+

$db_params - The DB parameters (ref to hash)

+
+
Side Effects
+ +
+

Enables or disables triggers for a table.

+
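The enable/disable keyword is swapped with a single substitution on the extracted statement (as in the script below):

        # e.g. turns "ALTER TABLE schema.some_table DISABLE TRIGGER ALL;"
        # into       "ALTER TABLE schema.some_table ENABLE TRIGGER ALL;"
        (my $sql = $able_trigger) =~ s{\b(?:ENABLE|DISABLE)\b}{\U$able\E}i;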
+
+

+

+

parse_input_file

+

Parse the input data file for different SQL statements and return each statement.

+
+
Parameters
+ +
+

$input_fh - a file handle opened to the input file

+

$file - the input file name

+
+
Returns
+ +
+

In array context, an array of all the different SQL statements parsed. In scalar context, a reference to that array.

+

[0] $sql - the main updating SQL statement(s)

+

[1] $desc - the description of this task

+

[2] $range_sql - the SQL statement that gets the ID ranges

+

[3] $create_func_sql - the SQL to create a wrapper function, if any

+

[4] $drop_func_sql - the SQL statement that drops the wrapper function, if any

+

[5] $able_trigger - the SQL to enable/disable all triggers on the update table

[6] $truncate_sql - the TRUNCATE TABLE statement, if any

+
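The main loop unpacks the returned list like this:

        my ($sql, $desc, $range_sql, $create_func_sql,
            $drop_func_sql, $able_trigger, $truncate_sql) =
            parse_input_file( $input_fh, $file );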
+
+ + + + diff --git a/KCLS/sql/browse/combined/reingest/data_update_driver.pl b/KCLS/sql/browse/combined/reingest/data_update_driver.pl new file mode 100644 index 0000000000..af2cff34a2 --- /dev/null +++ b/KCLS/sql/browse/combined/reingest/data_update_driver.pl @@ -0,0 +1,991 @@ +#!perl -w + +=pod + +=head1 Name + +data_update_driver.pl + +=head1 Description + +The purpose for this script is to take SQL statements that act on a lot of +data (e.g. long UPDATEs) and break them into reasonable sized chunks. The +first partition is by the number of processes you want to run. The size is +the total number of keys in the table divided by the number of processes. + +The second partition is the number of rows you want to process at a time +within a first partition. For instance, the number of keys and processes +may create a first partition of 10,000. Within that, you might want to +process 500 rows at a time. + +The script will start a new child process for each of the first partitions +and within those, will process I number of rows, I and processes +being configurable. + +=cut + +use strict; +use warnings; +use v5.8; +use integer; + +=head1 Modules + +Modules used and what for. + +Note: to load a new module into your local system, execute + + perl -MCPAN -e shell + cpan> Module::Name + cpan> quit + +=over 4 + +=item DBI + +All interactions with the database and SQL. + +=item DBD::Pg + +Specific protocol for PostGreSQL. + +=item Parallel::ForkManager + +Runs up to I number of processes in parallel. I is set by B<--max-processes>. + +=item File::Basename + +Use basename() to strip off path and extention of a file name. + +=item Getopt::Long + +Retrieve long command line options (e.g. --xxx) and do simple validation. + +=item Term::ReadKey + +Used to hide console echoing while typing in passwords. + +=item Pod::Usage + +Used to display usage messages that are pulled from the program's POD. + +=back + +=cut + +use DBI; +use DBD::Pg; +use Parallel::ForkManager; +use File::Basename; +use Getopt::Long; +use Term::ReadKey; +use Pod::Usage; + +=head1 Usage + + post_update_driver.pl [--dir ] [--finished-dir ] + [--database ] [--host ] [--port ] + [--user ] [--password ] + [--exit-on-error] [--max-processes ] + [--rows ] + +=head1 Arguments + +=over 4 + +=item B + +The directory containing the update scripts. It defaults to the current +directory. + +=item B + +The directory where the scripts are moved to when they finish correctly. It +defaults to a folder called 'finished'. + +=item B + +The database name. + +=item B + +The host name or IP address for the database. + +The port number. + +=item B + +The port number for the database. + +=item B + +The user name for the database. + +=item B + +The password for the database. If none is provided, it will ask for one. + +=item B + +Flag: should the script stop when it encounters an SQL error? If not, it will +continue processing. Regardless, an error file is created with the extension +'.err' that holds the error message. + +=item B + +Number: what is the maximum number of processes to run in parallel? The +default is four. + +=item B + +The number of rows the SQL script should process at a time. The default +is 500. + +=item B + +Display usage and exit. 
+ +=back + +=cut + +$| = 1; #auto flush + +my $dir = '.'; +my $finished_dir = 'finished'; +my $database = ''; +my $host = ''; +my $port = 5432; # PostGres's default port +my $user = ''; +my $password = ''; +my $max_processes = 6; +my $rows = 500; +my $help; +my $exit_on_error; + +# Use Getopt::Long to get the command line options. Use the POD section +# "Usage" if an option is entered incorrectly +GetOptions( + 'help!' => \$help, # default is false... + 'exit-on-error!' => \$exit_on_error, + 'dir=s' => \$dir, # strings... + 'finished-dir=s' => \$finished_dir, + 'database=s' => \$database, + 'host=s' => \$host, + 'port=s' => \$port, + 'user=s' => \$user, + 'password=s' => \$password, + 'max-processes=i' => \$max_processes, # numeric + 'rows=i' => \$rows +) or pod2usage( -verbose => 99, -sections => [ 'Usage' ], -exitval => 2 ); + +# Print the POD Usage and Arguments sections if the help flag is up +if ($help) { + pod2usage( + -verbose => 99, + -sections => [ 'Usage', 'Arguments' ], + -exitval => 1 ); +} + +=head1 Pre-loop + +Get command line options. Get password if none is supplied. Exit if B +does not exist. If B does not exist, create it. Setup database +parameters. Test database parameters to see if they connect correctly. If +B is set, make a callback for the parent process so it will die +if a child processes returns an error code. Remove all error files. These +would have been created by a previous run. + +=cut + +# Get password if not supplied +unless ($password) { + print "Type your password: "; + ReadMode('noecho'); # don't display characters while typing + chomp($password = ); + ReadMode(0); # back to normal + print "\n"; +} + +# Check the directories +$dir =~ s|\\|/|g; # backslashes to slashes +$dir =~ s|/$||; # remove trailing slash +$finished_dir =~ s|\\|/|; +$finished_dir =~ s|/$||; + +unless ( -d $dir ) { + die "$dir does not exist\n"; +} + +unless ( -d $finished_dir ) { + mkdir $finished_dir or die "Could not create $finished_dir\n$!\n"; +} + +# Database connect info +my $db_params = { + platform => 'Pg', # Always PostGreSQL + database => $database, + host => $host, + port => $port, + user => $user, + pw => $password +}; + +# Check that database info is correct +my $test_dbh = get_db_handle( $db_params ); +$test_dbh->disconnect; + +my $pm = Parallel::ForkManager->new($max_processes); + +# Callback that checks the exit status of the children. +# If we should exit on error, tell the parent to die. +if ($exit_on_error) { + $pm->run_on_finish( + sub { + my ($pid, $exit_code, $ident) = @_; + + if ($exit_code == 1) { + die "Child process encountered an error in the SQL\n"; + } elsif ($exit_code == 2) { + die "Child process encountered an error during rename\n"; + } + } + ); +} + +print "Removing error files...\n"; +unlink glob "$dir/*.err"; + +=head1 Main + +Roll through each data file in B that ends with _data.sql. Read the +file line by line. The first comment (--) is the script's description. +The first line that begins "SELECT MAX" is considered the id min and max +SQL, all in one line. The id ranges are determined by +executing this SQL. The partition size is determined by diving the total +number of keys by B. + +This script is responsible for creating its own wrapper function to be called +with a start id and end id, if needed. When the script encounters a data line +that begins +"CREATE [OR REPLACE] FUNCTION", it will start collecting lines. It will end +when it finds a line that starts with "$$ LANGUAGE". 
All of the lines between +these two, inclusive, are the create wrapper function script. The wrapper +function should call the update function passing starting and ending ID. You +may not need a wrapper function if you are calling an UPDATE direectly. + +The data line that starts with "DROP FUNCTION" is considered SQL to drop the +wrapper function, if needed. It should be one line only. You do not need this +SQL if you are not using the CREATE FUNCTION SQL. + +If a data line starts with "ALTER TABLE", it is considered the enable/disable +triggers SQL statement. All triggers are disable before running the updates +and enabled afterward. + +After all of the above lines +are removed from consideration, what remains is the actual SQL update. It is +often just a SELECT statement that calls the wrapper function with the place +holders "~start_id~" and "~end_id~". This script will replace them with the +values it calculates. + +Comments and blank lines are ignored. + +Then, start a loop for each partition size and +start a child process for each one. Within each partision, execute the SQL +on only B number of rows. This is determined by setting the starting +and ending ID. Since IDs aren't always sequential, there may be less that +B number of rows updated. + +After all partitions execute the file script is moved +to the B folder. File scripts that encountered errors stay in +B with their error files. Triggers are enabled. + +A sample data file might look like this: + + SELECT MAX(id), MIN(id) from schema.some_table; + + ALTER TABLE schema.some_table DISABLE TRIGGER ALL; + + UPDATE schema.some_table SET col_name = something + WHERE id >= ~start_id~ AND id < ~end_id~; + +A sample file that updates using a standard function in Evergreen might look +like this: + + CREATE OR REPLACE FUNCTION schema.wrapper_function(start_id BIGINT, end_id BIGINT) + RETURNS void AS $$ + DECLARE + rec RECORD; + BEGIN + FOR rec IN SELECT id, some_col FROM schema.table_to_update WHERE id >= start_id AND id < end_id + LOOP + PERFORM schema.update_function( rec.id, rec.some_col ); + END LOOP; + END; + $$ LANGUAGE plpgsql; + + DROP FUNCTION IF EXISTS schema.wrapper_function(BIGINT, BIGINT); + + SELECT MAX(id), MIN(id) from schema.table_to_update; + + ALTER TABLE schema.some_table DISABLE TRIGGER ALL; + + SELECT schema.wrapper_function(~start_id~, ~end_id~); + +=cut + +print "Begin creating helper functions...\n"; +my $time = time(); +my @drop_func = create_helper_func( $dir, $db_params ); + +print "Begin executing post update scripts...\n"; +my $error; + +# All of these processes will run in parallel, up to $max_processes +foreach my $file ( glob "$dir/*_data.sql" ) { + my $input_fh; + + # Open file + unless ( open ($input_fh, '<', $file) ) { + + # Log error on failure + my $system_error = $!; + my ( $fail_fh, $error_file ) = get_error_file( $file ); + print $fail_fh "Unable to open $file for reading\n"; + print $fail_fh "$system_error\n"; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "Unable to open $file for reading\n"; + + next; + } + + # Parse input for different SQL statements + my ($sql, $desc, $range_sql, $create_func_sql, $drop_func_sql, $able_trigger, $truncate_sql) = + parse_input_file( $input_fh, $file ); + + #Truncate Table statement + if ($truncate_sql) { + next unless run_sql( $db_params, $truncate_sql, $file, $desc ); + } + + # Disable all triggers for this table + print "Disabling triggers...\n"; + able_all_triggers( 'DISABLE', $able_trigger, $file, $db_params ); + + # Create the 
function that will get called with an id range + if ($create_func_sql) { + next unless run_sql( $db_params, $create_func_sql, $file, $desc ); + } + + # Get the id ranges for this table + print "Getting id ranges...\n"; + unless ($range_sql) { die "*** Bad input script $file, no id ranges\n" } + my ($max_id, $min_id) = run_select_sql( $db_params, $range_sql, $file, $desc ); + + unless ( defined $max_id and defined $min_id ) { + my ( $fail_fh ) = get_error_file( $file ); + print $fail_fh "Could not determine the id ranges\n"; + next; + } + + # Break table into partitions based on id ranges and processes + my $count = $max_id - $min_id; + my $part_size = $count / $max_processes; # int div because of use integer + my $print_file = basename $file; + + for ( my $part = 0; $part < $count; $part += $part_size ) { + my $pid = $pm->start and next; + print "\t$file, part $part ($pid)\n"; + my $print_file = basename $file; + + # Execute SQL in ranges of ids based on min/max ids + for ( my $start_id = $part; $start_id < $part + $part_size; $start_id += $rows ) { + + # Set the start id in a copy of the SQL string + (my $exec_sql = $sql) =~ s/~start_id~/$start_id/i; + + # The last limit will probably not be the exact rows amount + my $left = $count - $start_id + 1; + my $this_rows = $rows <= $left ? $rows : $left; + $exec_sql =~ s/~end_id~/$start_id + $this_rows/ie; + + # Execute the SQL + if ( run_sql($db_params, $exec_sql, $file, $desc, $start_id ) ) { + print "\t$start_id, " . ($part + $part_size - $start_id) . + " left ($print_file)\n"; + } else { + $error = 1; + last; + } + } + + # Inform the parent process of the error + if ($error) { + $pm->finish(1); + last; + } else { + $pm->finish; + } + } + + $pm->wait_all_children; + + # Succesful finish, move script + unless ($error) { + my $base = basename $file; + my $finish_name = "$finished_dir/$base"; + + rename $file, $finish_name + or warn "*** Could not rename $file to $finish_name\n$!\n"; + } + + # Drop wrapper function + if ($drop_func_sql) { + unless ( run_sql( $db_params, $drop_func_sql, $file, $desc ) ) { + warn "*** Could not drop wrapper function\n"; + } + } + + # Enable all triggers for this table + print "Enabling triggers...\n"; + able_all_triggers( 'ENABLE', $able_trigger, $file, $db_params ); + + last if $error && $exit_on_error; + +} # foreach file + +# Drop any temporary functions used above +foreach my $drop (@drop_func) { + run_sql($db_params, $drop, 'No file', 'Drop function' ); +} + +print 'Finished' . ($error ? ' with error' : '') . "\n"; + +# Do this when the program ends, no matter what. +# A side effect of this is that time will print when each child process ends. +END { + print_time( $time ); +} + +=head1 Subroutines + +=head2 get_db_handle + +Get a database handle + +=over 4 + +=item Parameters + +B<$db_params> - reference to several DB parameters + +=item Returns + +B<$dbh> - database handle or zero + +=back + +=cut + +sub get_db_handle { + my $db_params = shift || return 0; + + my $platform = $db_params->{platform}; + my $database = $db_params->{database}; + my $host = $db_params->{host}; + my $port = $db_params->{port}; + my $user = $db_params->{user}; + my $pw = $db_params->{pw}; + + my $dsn = "dbi:$platform:dbname = $database; host = $host; port = $port"; + + my $dbh = DBI->connect( $dsn, $user, $pw, { + 'PrintError' => 1, + 'RaiseError' => 1, + 'PrintWarn' => 1, + 'AutoCommit' => 0 # Auto commit off so we can commit/rollback + }) or die "Unable to connect: " . $DBI::errstr . 
"\n"; + + return $dbh; +} + +=head2 run_sql + +Execute a non-SELECT SQL statement and capture any error output + +=over 4 + +=item Parameters + +B<$db_params> - the DB parameters (ref to hash) + +B<$sql> - the SQL statement + +B<$file> - the file name + +B<$desc> - a description of the task (first comment) + +B<$start_d> - the starting ID when the error occurred or zero + +=item Returns + +1 = success, 0 = failure + +=item Side Effects + +Creates a file with the extension .err if there is an error executing the SQL + +=back + +=cut + +sub run_sql { + my $db_params = shift; + my $sql = shift; + my $file = shift || 'no_file'; + my $desc = shift || 'SQL Script'; + my $start_id = shift || '0'; + + # Sanity check + unless ( $db_params and ref $db_params eq 'HASH' and $sql ) { + return 0; + } + + my $dbh = get_db_handle($db_params); + + # Catch any errors + eval { $dbh->do($sql) }; + + # If there were errors... + if ($@) { + warn "$@\n"; + + # Log SQL error + my $err = $dbh->errstr; + $dbh->rollback; + my $rollback_err = $dbh->errstr; + warn "*** $file rolled back\n" unless $rollback_err; + $dbh->disconnect; + my $disconnect_err = $dbh->errstr; + my ( $fail_fh, $error_file ) = get_error_file( $file, $start_id ); + + print $fail_fh "Can't execute SQL statement!\n"; + print $fail_fh "$file: $desc\n"; + print $fail_fh "$err\n"; + print $fail_fh "Rollback error: $rollback_err\n" if $rollback_err; + print $fail_fh "Disconnect error: $disconnect_err\n" if $disconnect_err; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "*** Can't execute SQL statement! $file: $desc\n"; + + return 0; + } + + $dbh->commit; + $dbh->disconnect; + + return 1; +} + +=head2 run_select_sql + +Execute a SELECT SQL statement and fetch one column + +=over 4 + +=item Parameters + +B<$db_params> - the DB parameters (ref to hash) + +B<$sql> - the SQL statement + +B<$file> - the data input file name + +B<$desc> - a description of the task (first comment) + +B<$start_id> - the starting ID when the error occurred or zero + +=item Returns + +An array of column values in list context, or a reference to the array in +scalar context + +=item Side Effects + +Creates a file with the extension .err if there is an error executing the SQL + +=back + +=cut + +sub run_select_sql { + my $db_params = shift; + my $sql = shift; + my $file = shift || 'no_file'; + my $desc = shift || 'SQL Script'; + my $start_id = shift || '0'; + + # Sanity check + unless ( $db_params and ref $db_params eq 'HASH' and $sql ) { + return 0; + } + + my $dbh = get_db_handle($db_params); + my @row; + my $sth; + + eval { + $sth = $dbh->prepare( $sql ); + $sth->execute(); + @row = $sth->fetchrow_array(); + }; + + if ($@) { + warn "$@\n"; + + # Log SQL error + my $err = $dbh->errstr; + $dbh->disconnect; + my $disconnect_err = $dbh->errstr; + my ( $fail_fh, $error_file ) = get_error_file( $file, $start_id ); + + print $fail_fh "Can't execute SQL statement!\n"; + print $fail_fh "$file: $desc\n"; + print $fail_fh "$sql\n"; + print $fail_fh "$err\n"; + print $fail_fh "Disconnect error: $disconnect_err\n" if $disconnect_err; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "*** Can't execute SQL statement! $file: $desc\n"; + + return undef; + } + + $sth->finish; + $dbh->disconnect; + return wantarray ? @row : \@row; +} + +=head2 get_error_file + +Create and open an error file. Put in a timestamp. The error file name is +the file name with the extention of .err. 
+ +=over 4 + +=item Parameters + +B<$file> - the file name to create the error file for + +B<$start_id> - the starting ID when the error occurred or zero + +=item Returns + +An array in list context; a reference to an array in scalar context + +[0] B<$fail_fh> - the file handle of the openned error file + +[1] B<$error_file> - the error file name + +=back + +=cut + +sub get_error_file { + my $file = shift || 'unknown'; + my $start_id = shift || '0'; + + my ($sec, $min, $hour, $mday, $mon, $year) = (localtime(time))[0..5]; + my $timestamp = "$hour:$min:$sec " . ($mon + 1) . "-$mday-" . ($year + 1900); + my ( $basename, $dir ) = fileparse( $file, '.sql' ); + my $error_file = "$dir$basename-$start_id.err"; + my $fail_fh; + + open ($fail_fh, '>>', $error_file) + or die "Could not open $error_file for appending\n"; + print $fail_fh "$timestamp\n"; + my @return_data = ( $fail_fh, $error_file ); + + return wantarray ? @return_data : \@return_data; +} + +=head2 print_time + +Print time elapsed in hours, minutes, and seconds + +=over 4 + +=item Parameters + +B<$start> - the start time, taken from the I function + +=item Side Effects + +Prints elasped time to the standand out + +=back + +=cut + +sub print_time { + my $start = shift || 0; + my $elapsed = time() - $start; + my $hours = $elapsed / (60 * 60); + my $seconds = $elapsed % 60; + my $minutes = ($elapsed - $hours * 60 * 60) / 60; + + print "Time elapsed: "; + print "$hours hours, " if $hours; + print "$minutes minutes, " if $minutes; + print "$seconds seconds\n"; +} + +=head2 create_helper_func + +Create any helper functions needed by the update. The input data file should +contain a commented DROP statement what will drop the function when it's not +needed. For example: + + -- DROP FUNCTION IF EXISTS schema.some_function(BIGINT, TEXT) + + CREATE OR REPLACE FUNCTION schema.some_function(id BIGINT, marc TEXT) + ... + +=over 4 + +=item Parameters + +B<$dir> - the source directory for the input data files + +B<$db_params> - a hash reference to the DB parameters + +=item Returns + +In list context, an array of SQL DROP statements that will remove the helper +functions at the end of the update. In scalar context, a reference to that array. + +=item Side Effects + +An error file is created if an error in encountered. + +=back + +=cut + +sub create_helper_func { + my $dir = shift; + my $db_params = shift; + my @drop_func = (); + + foreach my $file ( glob "$dir/*_create.sql" ) { + + # Open file and get SQL statement + unless ( open (FH, '<', $file) ) { + + # Log error on failure + my $system_error = $!; + my ( $fail_fh, $error_file ) = get_error_file( $file ); + print $fail_fh "Unable to open $file for reading\n"; + print $fail_fh "$system_error\n"; + close $fail_fh or warn "Could not close $error_file\n$!\n"; + warn "*** Unable to open $file for reading\n"; + + next; + } + + my $sql = ''; + + # Loop thru create file + while () { + + # Collect DROP FUNCs in array + if ( /^\s*--\s*DROP\s+FUNCTION/ ) { + s/^\s*--\s*//; + push @drop_func, $_; + next; + } + + next if /^\s*--/; + next if /^\s*$/; #* this comment helps syntax highlighting + + $sql .= $_; + } + + close FH or warn "*** Could not close $file\n$!\n"; + run_sql( $db_params, $sql, $file, 'Create function' ) + or die "*** Could not create helper file\n"; + + } # end foreach $file + + return wantarray ? @drop_func : \@drop_func; +} + +=head2 able_all_triggers + +Enable/Disable triggers on a table. The SQL is pulled from the input data +file line that begins "ALTER TABLE". 
+ +=over 4 + +=item Paramters + +B<$able> - The word ENABLE or DISABLE, depending on what you want to do to +the triggers. Defaults to DISABLE. + +B<$range_sql> - The SQL statement that gets the ID ranges, previously +extracted from the input data file. + +B<$file> - The name of the input data file. + +B<$db_params> - The DB parameters (ref to hash) + +=item Side Effects + +Enables or disables triggers for a table. + +=back + +=cut + +sub able_all_triggers { + my $able = shift || 'DISABLE'; + my $able_trigger = shift; + my $file = shift; + my $db_params = shift; + + unless ( $able =~ /ENABLE|DISABLE/i ) { + warn "*** Bad first param in able_all_triggers()\n"; + } + + # Change the SQL statement to reflect enabling or disabling + (my $sql = $able_trigger) =~ s{\b(?:ENABLE|DISABLE)\b}{\U$able\E}i; + + unless ( run_sql( $db_params, $sql, $file, "\L$able\E triggers" ) ) { + warn "*** Cannot \L$able\E triggers\n"; + } +} + +=head2 parse_input_file + +Parse the input data file for different SQL statements and return each +statement. + +=over 4 + +=item Parameters + +B<$input_fh> - a file handle opened to the input file + +B<$file> - the input file name + +=item Returns + +In array context, an array of all the different SQL statements parsed. In +scalar context, a reference to that array. + +[0] B<$sql> - the main updating SQL statement(s) + +[1] B<$desc> - the description of this task + +[2] B<$range_sql> - the SQL statement that gets the ID ranges + +[3] B<$create_func_sql> - the SQL to create a wrapper function, if any + +[4] B<$drop_func_sql> - the SQL statement that drops the wrapper function, if any + +[5] B<$able_trigger> - the SQL to enable/disable all triggers on the update table + +=back + +=cut + +sub parse_input_file { + my $input_fh = shift || return undef; + my $file = shift; + + my $sql = ''; + my $desc = ''; + my $range_sql = ''; + my $create_func_sql = ''; + my $drop_func_sql = ''; + my $able_trigger = ''; + my $truncate_sql = ''; + + # String SQL statement together + while (<$input_fh>) { + + # Kludge: remove anything that isn't ASCII 20-127 or whitespace + # (why are we getting weird characters in front of the first line?) + s/[^\x{21}-\x{7E}\s]//g; + + # First comment is the description + if ( $desc eq '' && /^\s*--\s*/ ) { + chomp; + s/^\s*--\s*//; # strip off dashes and leading whitespace + $desc = $_; + next; + } + + # Ignore comments and blank lines + next if /^\s*--/; + next if /^\s*$/; #* this comment helps syntax highlighting + + # Find the min and max ids select statement + if ( $range_sql eq '' && /^\s*SELECT\s+MAX/i ) { + chomp; + $range_sql = $_; + next; + } + + # Find drop function SQL + if ( $drop_func_sql eq '' && /^\s*DROP\s+FUNCTION\s+/i ) { + chomp; + $drop_func_sql = $_; + next; + } + + # Find truncate SQL + if ( $truncate_sql eq '' && /^\s*TRUNCATE\s+TABLE\s+/i ) { + chomp; + $truncate_sql = $_; + next; + } + + # Find enable/disable trigger statement + if ( $able_trigger eq '' && /^\s*ALTER\s+TABLE\s+/i ) { + chomp; + $able_trigger = $_; + next; + } + + # Get create function SQL + # Starts with "CREATE [OR REPLACE] FUNCTION..." + # Ends with "$$ LANGUAGE..." + if ( $create_func_sql eq '' && + /^\s*CREATE\s+(OR\s+REPLACE\s+)?FUNCTION\s+/i ) + { + while (1) { + $create_func_sql .= $_; + defined( $_ = <$input_fh> ) + or die "*** Readline failed: $!\nBad input script? 
$file\n"; + + if ( /^\s*\$\$\s+LANGUAGE\s+/i ) { + $create_func_sql .= $_; + last; + } + } + + next; + } + + # Add to execute SQL + $sql .= $_; + + } # end while readline SQL file + + close $input_fh or warn "*** Could not close $file\n$!\n"; + + my @return_data = ($sql, $desc, $range_sql, $create_func_sql, $drop_func_sql, $able_trigger, $truncate_sql); + + return wantarray ? @return_data : \@return_data; +} + +__END__ \ No newline at end of file diff --git a/KCLS/sql/browse/combined/reingest/reingest_metabib_data.sql b/KCLS/sql/browse/combined/reingest/reingest_metabib_data.sql new file mode 100644 index 0000000000..280b285ebb --- /dev/null +++ b/KCLS/sql/browse/combined/reingest/reingest_metabib_data.sql @@ -0,0 +1,20 @@ + +-- Must start with CREATE [OR REPLACE] FUNCTION and end with $$ LANGUAGE. +CREATE OR REPLACE FUNCTION biblio.wrap_reingest_metabib_field_entries(start_id BIGINT, end_id BIGINT) +RETURNS void AS $$ +DECLARE + rec RECORD; +BEGIN + FOR rec IN SELECT id, marc FROM biblio.record_entry WHERE id >= start_id AND id < end_id + LOOP + PERFORM metabib.reingest_metabib_field_entries( rec.id, false, false, false ); + END LOOP; +END; +$$ LANGUAGE plpgsql; + +-- Get min and max id from update file +SELECT MAX(id), MIN(id) FROM biblio.record_entry; + +-- Execute part of the trigger with start and end id. +-- These are replaced by the post_update_driver script. +SELECT biblio.wrap_reingest_metabib_field_entries(~start_id~, ~end_id~); \ No newline at end of file diff --git a/KCLS/sql/browse/combined/runReingestSh.sh b/KCLS/sql/browse/combined/runReingestSh.sh new file mode 100755 index 0000000000..5c39f4705c --- /dev/null +++ b/KCLS/sql/browse/combined/runReingestSh.sh @@ -0,0 +1,4 @@ +date +echo reingest +date +perl reingest/data_update_driver.pl --dir reingest/ --finished-dir reingest/fin --database evergreen --host evergreentest.catalystitservices.com --port 5415 --user evergreen --password evergreen --max-processes 4 --rows 2500 diff --git a/KCLS/sql/browse/combined/runSh.sh b/KCLS/sql/browse/combined/runSh.sh new file mode 100755 index 0000000000..07a247b59b --- /dev/null +++ b/KCLS/sql/browse/combined/runSh.sh @@ -0,0 +1,32 @@ +#!/bin/sh +if [ "$1" = "-h" ]; then + HNAME=$2 +else + echo Please enter hostname with -h "hostname" + return +fi + +if [ "$3" = "-p" ]; then + PORT=$4 +else + echo Please enter port with -p "port" + return +fi + +echo hostname is $HNAME and port is $PORT '\n' + +for file in *.sql +do + echo '\n' + date + echo upgrading with $file + psql -U evergreen -h $HNAME -p $PORT -d evergreen -f $file +done + +echo '\n' +date + +# date +# echo reingest +# date +# perl reingest/data_update_driver.pl --dir reingest/ --finished-dir reingest/fin --database evergreen --host $HNAME --port $PORT --user evergreen --password evergreen --max-processes 4 --rows 2500 diff --git a/KCLS/sql/browse/kmain-706/kmain-706.4-to-1-rollback.sql b/KCLS/sql/browse/kmain-706/kmain-706.4-to-1-rollback.sql new file mode 100644 index 0000000000..c3ac470991 --- /dev/null +++ b/KCLS/sql/browse/kmain-706/kmain-706.4-to-1-rollback.sql @@ -0,0 +1,521 @@ +-- Rollback for testing going from 4 tables to 1 for various browse tables. 
(metabib.browse_entry, metabib.browse_entry_def_map, metabib.browse_entry_simple_heading_map) +-- The following functions are reverted: +-- * metabib.reingest_metabib_field_entries +-- * metabib.staged_browse +-- * metabib.browse +-- * authority.indexing_ingest_or_delete + +BEGIN; + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + + + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + --ver1.1 updated with kmain-806 + -- Get search field int list with search_class + IF search_class = 'id|bibcn' THEN + + SELECT INTO search_class 'call_number'; + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; + + IF pivot_id IS NULL THEN + + pivot_id := metabib.browse_call_number_pivot(browse_term); + + END IF; + ELSE + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + + END CASE; + END IF; + END IF; + + CASE search_class + WHEN 'author' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = pivot_id; + WHEN 'title' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_title_entry WHERE id = pivot_id; + WHEN 'subject' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_subject_entry WHERE id = pivot_id; + WHEN 'series' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_series_entry WHERE id = pivot_id; + WHEN 'call_number' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = pivot_id; + + END CASE; + + --<< + + -- Bail if we couldn't find a pivot. 
+ IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.truncated_sort_value <= ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.truncated_sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.truncated_sort_value > ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.truncated_sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. 
+ RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION ALL + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; +BEGIN + --ver1.1 updated with kmain-806 - added support for split browse_entry and browse_entry_simple_heading_map tables + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- Get the search_class + SELECT INTO search_class cmf.field_class + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value ) VALUES + ( ashs.value, ashs.sort_value ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES 
(mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value ) VALUES + ( ashs.value, ashs.sort_value ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value ) VALUES + ( ashs.value, ashs.sort_value ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value ) VALUES + ( ashs.value, ashs.sort_value ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( ashs.value, ashs.sort_value ); + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; + + +COMMIT; \ No newline at end of file diff --git a/KCLS/sql/browse/kmain-706/kmain-706.4-to-1.sql b/KCLS/sql/browse/kmain-706/kmain-706.4-to-1.sql new file mode 100644 index 0000000000..1c90c99109 --- /dev/null +++ b/KCLS/sql/browse/kmain-706/kmain-706.4-to-1.sql @@ -0,0 +1,14 @@ +-- Testing going from 4 tables to 1 for various browse tables. (metabib.browse_entry, metabib.browse_entry_def_map, metabib.browse_entry_simple_heading_map) +-- The following functions are reverted: +-- * metabib.reingest_metabib_field_entries +-- * metabib.staged_browse +-- * metabib.browse +-- * authority.indexing_ingest_or_delete + +BEGIN; + + + + + +COMMIT; \ No newline at end of file diff --git a/KCLS/sql/browse/kmain212.README b/KCLS/sql/browse/kmain212.README new file mode 100644 index 0000000000..322cb79baf --- /dev/null +++ b/KCLS/sql/browse/kmain212.README @@ -0,0 +1,11 @@ +These scripts rely on the old browse system, and rollback to, kmain553 (the browse speed up scripts). +So you need both at the moment. 
+ +To install, run scripts in this order: + +1: metabib.staged_browse.SQL +2: biblio.extract_metabib_field_entry.SQL +3: metabib.reinjest_metabib_field_entries.SQL +4: Call_Number.SQL +5: metabib.browse_function.SQL + diff --git a/KCLS/sql/browse/kmain212.testsForSQL b/KCLS/sql/browse/kmain212.testsForSQL new file mode 100644 index 0000000000..9a0b9a26a8 --- /dev/null +++ b/KCLS/sql/browse/kmain212.testsForSQL @@ -0,0 +1,146 @@ +SELECT metabib.browse('author','j'); +SELECT metabib.browse('id|bibcn','j'); + +--search_field +SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'author'; -- {10,8,7,205,217,232,235,236,9,118} +SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; -- {25} + +--pivot_id +SELECT metabib.browse_author_pivot((SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'author'), 'j'); -- 1908016 + +SELECT metabib.browse_call_number_pivot((SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'), 'j'); -- 5 + +--pivot_sort_value, pivot_sort_fallback +SELECT truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = 1908016; -- "j eric", "J., Eric." + +SELECT truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = 5; -- "j rowling", "J ROWLING" + +SELECT quote_literal('{10,8,7,205,217,232,235,236,9,118}') +SELECT cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[]) + +--forward_query author +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_author_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_author_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal('j eric') ORDER BY mbe.sort_value, mbe.value --LOTS! + +--forward_query call_number / back doesn't get anything +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_call_number_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_call_number_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast('{25}' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? 
+ SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast('{25}' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal('j') ORDER BY mbe.sort_value, mbe.value --5,6 + +--staged browse author +SELECT * FROM metabib.staged_browse( + 'SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_author_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_author_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast(''{10,8,7,205,217,232,235,236,9,118}'' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast(''{10,8,7,205,217,232,235,236,9,118}'' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal(''j eric'') ORDER BY mbe.sort_value, mbe.value', + cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[]), + null, null, FALSE, 100, FALSE, 5, 9, + 'author' + ) ORDER BY row_number DESC; + +--staged browse call_number +SELECT * FROM metabib.staged_browse( + 'SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_call_number_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_call_number_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(cast(''{25}'' AS integer[])) + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? 
+ SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(cast(''{25}'' AS integer[])) + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND mbe.truncated_sort_value > quote_literal(''j'') ORDER BY mbe.sort_value, mbe.value', + cast('{25}' AS integer[]), + null, null, FALSE, 100, FALSE, 5, 9, + 'call_number' + ) ORDER BY row_number DESC; + + + +-- gather aggregate +SELECT + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_author_entry_def_map + WHERE entry = 2087343 + AND def = ANY(cast('{10,8,7,205,217,232,235,236,9,118}' AS integer[]));--"{959591}" + +SELECT + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_call_number_entry_def_map + WHERE entry = 6 + AND def = ANY(cast('{25}' AS integer[]));--"{132163}" \ No newline at end of file diff --git a/KCLS/sql/browse/kmain553.README b/KCLS/sql/browse/kmain553.README new file mode 100644 index 0000000000..8d6c2a3096 --- /dev/null +++ b/KCLS/sql/browse/kmain553.README @@ -0,0 +1,7 @@ +1: Run all scripts in FullProcessBySearchType +2: Run metabib.browse_function.SQL +3: Run metabib.staged_browse_function.SQL +4: Run metabib.reinjest_metabib_field_entries.SQL +5: *DON'T DO THIS UNTIL YOU'RE SURE!!* Run clobber old tables + +NOTE: The new browse data will be just as complete as the old browse data. diff --git a/KCLS/sql/browse/kmain691.sql b/KCLS/sql/browse/kmain691.sql new file mode 100644 index 0000000000..8e223e9f75 --- /dev/null +++ b/KCLS/sql/browse/kmain691.sql @@ -0,0 +1,10 @@ +--KMAIN-691: Subject Browse Displayed Twice Error + +--This display is fixed by changing the browse_xpath in config.metabib_field. +--It uses the facet instead. 
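-- Editorial sketch, not part of this change: before running the UPDATE below,
-- the rows it will touch can be previewed with a query along these lines,
-- using the same config.metabib_field columns the UPDATE itself references.
SELECT id, field_class, name, format, browse_xpath
  FROM config.metabib_field
 WHERE field_class LIKE 'subject' AND format LIKE 'kcls';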
+ +BEGIN; + + UPDATE config.metabib_field SET browse_xpath = '//*[local-name()=''facet'']' WHERE field_class LIKE 'subject' AND format LIKE 'kcls'; + +COMMIT; \ No newline at end of file diff --git a/KCLS/sql/browse/kmain925/kmain925-remove_duplicate_auth_records b/KCLS/sql/browse/kmain925/kmain925-remove_duplicate_auth_records new file mode 100644 index 0000000000..84c6f2b715 --- /dev/null +++ b/KCLS/sql/browse/kmain925/kmain925-remove_duplicate_auth_records @@ -0,0 +1,73 @@ +-- For 001 a +-- Prepare +CREATE TABLE authority.duplicate_001 AS SELECT record, value FROM authority.full_rec WHERE tag ILIKE '001' AND subfield ILIKE 'a' AND value IN (SELECT value FROM authority.full_rec WHERE tag ILIKE '001' GROUP by value HAVING count(id) > 1) ORDER BY value; +CREATE TABLE authority.dup_001_to_delete AS SELECT record FROM authority.duplicate_001 WHERE record NOT IN (SELECT min(record) FROM authority.duplicate_001 GROUP BY value); + +CREATE OR REPLACE FUNCTION authority.remove_dup_001_from_are () + RETURNS void AS +$BODY$ +DECLARE + id_to_delete BIGINT; +BEGIN + FOR id_to_delete IN SELECT record FROM authority.dup_001_to_delete LIMIT 1000 LOOP + UPDATE authority.record_entry SET deleted = TRUE + WHERE id = id_to_delete; + + DELETE FROM authority.full_rec WHERE record = id_to_delete; + DELETE FROM authority.simple_heading WHERE record = id_to_delete; + + DELETE FROM authority.dup_001_to_delete + WHERE record = id_to_delete; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.remove_dup_001_from_are() + OWNER TO evergreen; + +-- Run functions until all duplicates are done. +SELECT * FROM authority.remove_dup_001_from_are(); -- authority.dup_001_to_delete is empty. + +-- Clean up +DROP FUNCTION authority.remove_dup_001_from_are(); +DROP TABLE authority.duplicate_001; +DROP TABLE authority.dup_001_to_delete; + + +-- Now for 010 a + +--Prepare +CREATE TABLE authority.duplicate_010 AS SELECT record, value FROM authority.full_rec WHERE tag ILIKE '010' AND subfield ILIKE 'a' AND value IN (SELECT value FROM authority.full_rec WHERE tag ILIKE '010' GROUP by value HAVING count(id) > 1) ORDER BY value; +CREATE TABLE authority.dup_010_to_delete AS SELECT record FROM authority.duplicate_010 WHERE record NOT IN (SELECT min(record) FROM authority.duplicate_010 GROUP BY value); + +CREATE OR REPLACE FUNCTION authority.remove_dup_010_from_are () + RETURNS void AS +$BODY$ +DECLARE + id_to_delete BIGINT; +BEGIN + FOR id_to_delete IN SELECT record FROM authority.dup_010_to_delete LIMIT 1000 LOOP + UPDATE authority.record_entry SET deleted = TRUE + WHERE id = id_to_delete; + + DELETE FROM authority.full_rec WHERE record = id_to_delete; + DELETE FROM authority.simple_heading WHERE record = id_to_delete; + + DELETE FROM authority.dup_010_to_delete + WHERE record = id_to_delete; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.remove_dup_010_from_are() + OWNER TO evergreen; + +-- Run functions until all duplicates are done. +SELECT * FROM authority.remove_dup_010_from_are(); -- authority.dup_010_to_delete is empty. 
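-- Editorial sketch, not part of the original script: because each call of the
-- removal function processes at most 1000 records from its work table, the
-- "run until done" step above could be automated with an anonymous block like
-- the following (the same pattern applies to the 001 variant earlier in this file).
DO $do$
BEGIN
    LOOP
        PERFORM authority.remove_dup_010_from_are();
        EXIT WHEN NOT EXISTS (SELECT 1 FROM authority.dup_010_to_delete);
    END LOOP;
END;
$do$;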
+ +-- Clean up +DROP FUNCTION authority.remove_dup_010_from_are(); +DROP TABLE authority.duplicate_010; +DROP TABLE authority.dup_010_to_delete; diff --git a/KCLS/sql/browse/rollback/025.kmain936.remove_browse_entry_references.rollback.SQL b/KCLS/sql/browse/rollback/025.kmain936.remove_browse_entry_references.rollback.SQL new file mode 100644 index 0000000000..ed4a3624d8 --- /dev/null +++ b/KCLS/sql/browse/rollback/025.kmain936.remove_browse_entry_references.rollback.SQL @@ -0,0 +1,351 @@ +CREATE FUNCTION browse_authority_pivot(integer[], text) RETURNS bigint + LANGUAGE sql STABLE + AS $_$ + -- So far this function is not called. When its usage is known, depending on + -- how it is called/used we can use that information to modify it to use the new + -- broken apart tables for both metabib.browse_entry and metabib.browse_entry_simple_heading_map + --ver1.1 updated with kmain-806 - added note + SELECT mbe.id + FROM metabib.browse_entry mbe + -- JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + -- JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + -- JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + -- ash.atag = map.authority_field + -- AND map.metabib_field = ANY($1) + -- ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$_$; + + +ALTER FUNCTION metabib.browse_authority_pivot(integer[], text) OWNER TO evergreen; + +--puts browse_entry type back in (instead of browse_author_entry) +CREATE OR REPLACE FUNCTION indexing_ingest_or_delete() RETURNS trigger + LANGUAGE plpgsql + AS $_$ +DECLARE + ashps authority.simple_heading_plus%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashps IN SELECT * FROM authority.simple_heading_plus_set(NEW.marc) LOOP + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashps.record, ashps.atag, ashps.value, ashps.sort_value); + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- Get the search_class + SELECT INTO search_class cmf.field_class + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashps.atag; + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO 
metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$_$; + +--puts browse_entry type back in (instead of browse_author_entry) +CREATE OR REPLACE FUNCTION reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) RETURNS void + LANGUAGE plpgsql + AS $_X$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.0 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + + + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$_X$; \ No newline at end of file diff --git a/KCLS/sql/browse/rollback/kmain212.Call_Number.SQL.Rollback b/KCLS/sql/browse/rollback/kmain212.Call_Number.SQL.Rollback new file mode 100644 index 0000000000..44732a46da --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain212.Call_Number.SQL.Rollback @@ -0,0 +1,11 @@ +DROP TRIGGER metabib_browse_call_number_entry_fti_trigger ON metabib.browse_call_number_entry; + +DROP TABLE metabib.browse_call_number_entry_def_map; + +DROP TABLE metabib.browse_call_number_entry; + +DROP TABLE metabib.call_number_field_entry; + +UPDATE config.metabib_field + SET browse_field=false + WHERE field_class = 'identifier' AND name = 'bibcn'; diff --git a/KCLS/sql/browse/rollback/kmain212.biblio.extract_metabib_field_entry.SQL.Rollback b/KCLS/sql/browse/rollback/kmain212.biblio.extract_metabib_field_entry.SQL.Rollback new file mode 100644 index 0000000000..414cfb2b58 --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain212.biblio.extract_metabib_field_entry.SQL.Rollback @@ -0,0 +1,198 @@ +-- Function: biblio.extract_metabib_field_entry(bigint, text) + +-- DROP FUNCTION biblio.extract_metabib_field_entry(bigint, text); + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry(rid bigint, default_joiner text) + RETURNS SETOF metabib.field_entry_template AS +$BODY$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... 
+ curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". Data ise returned from oils_xpath (above) in UTF-8, not entity encoded + REGEXP_REPLACE( -- This escapes embeded [^<]+)(<)([^>]+<)$re$, + E'\\1<\\3', + 'g' + ), + '&(?!amp;)', + '&', + 'g' + ), + E'\\s+', + ' ', + 'g' + ) + ), ' '), ''), + joiner + ); + + CONTINUE WHEN curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g')); + output_row.sort_value := + public.naco_normalize(sort_value); + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + -- Returning browse rows with search_field = true for search+browse + -- configs allows us to retain granularity of being able to search + -- browse fields with "starts with" type operators (for example, for + -- titles of songs in music albums) + IF idx.search_field THEN + output_row.search_field = TRUE; + END IF; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + output_row.search_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g')); + + output_row.facet_field = TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')); + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 
100 + ROWS 1000; +ALTER FUNCTION biblio.extract_metabib_field_entry(bigint, text) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/rollback/kmain212.metabib.reingest_metabib_field_entries.SQL.Rollback b/KCLS/sql/browse/rollback/kmain212.metabib.reingest_metabib_field_entries.SQL.Rollback new file mode 100644 index 0000000000..92e9363f55 --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain212.metabib.reingest_metabib_field_entries.SQL.Rollback @@ -0,0 +1,174 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + + + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/rollback/kmain212.metabib.staged_browse.SQL.Rollback b/KCLS/sql/browse/rollback/kmain212.metabib.staged_browse.SQL.Rollback new file mode 100644 index 0000000000..a04fce8d88 --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain212.metabib.staged_browse.SQL.Rollback @@ -0,0 +1,237 @@ +-- Function: metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + +-- DROP FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text); + +CREATE OR REPLACE FUNCTION metabib.staged_browse(query text, fields integer[], context_org integer, context_locations integer[], staff boolean, browse_superpage_size integer, count_up_from_zero boolean, result_limit integer, next_pivot_pos integer, search_class text) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + CASE search_class + WHEN 'author' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM 
metabib.browse_author_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'title' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_title_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'subject' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_subject_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + WHEN 'series' THEN + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_series_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + ELSE + END CASE; + + + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. 
+ + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. + row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/rollback/kmain212.metabib_browse_function.SQL.ROLLBACK b/KCLS/sql/browse/rollback/kmain212.metabib_browse_function.SQL.ROLLBACK new file mode 100644 index 0000000000..11e7f8cebd --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain212.metabib_browse_function.SQL.ROLLBACK @@ -0,0 +1,150 @@ +-- browse_authority_refs_pivot -------------------------------------------------------------------- + +DROP FUNCTION metabib.browse_call_number_authority_refs_pivot(integer[], text); + +-- metabib.browse_bib_pivot(integer[], text) ------------------------------------------------------ + +DROP FUNCTION metabib.browse_call_number_bib_pivot(integer[], text); + +-- browse_pivot -------------------------------------------------------------------- + +DROP FUNCTION metabib.browse_call_number_pivot(search_field integer[], browse_term text)n; + + +-- Function: metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + +-- DROP FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer); + +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size 
INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + + + -- Get search field int list with search_class + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + END CASE; + END IF; + + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value FROM metabib.browse_entry WHERE id = pivot_id; --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND '; + + -- This is the variant of the query for browsing backward. 
+ back_query := core_query || + ' mbe.truncated_sort_value <= ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.truncated_sort_value > ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + + + diff --git a/KCLS/sql/browse/rollback/kmain538.create_and_index_truncated_sort_value.rollback.sql b/KCLS/sql/browse/rollback/kmain538.create_and_index_truncated_sort_value.rollback.sql new file mode 100644 index 0000000000..fcab3dbafa --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain538.create_and_index_truncated_sort_value.rollback.sql @@ -0,0 +1,11 @@ +--Remove indexing from truncated_sort_value +DROP INDEX metabib.browse_entry_truncated_sort_value_idx; + +--Drop the truncated column +ALTER TABLE metabib.browse_entry DROP COLUMN truncated_sort_value + +--Re-Index on sort_value +CREATE INDEX browse_entry_sort_value_idx + ON metabib.browse_entry + USING btree + (md5(sort_value COLLATE "default") COLLATE pg_catalog."default"); diff --git a/KCLS/sql/browse/rollback/kmain538.replace_browse_function.rollback.sql b/KCLS/sql/browse/rollback/kmain538.replace_browse_function.rollback.sql new file mode 100644 index 0000000000..7489fc7f6d --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain538.replace_browse_function.rollback.sql @@ -0,0 +1,116 @@ +--Revert to old metabib.browse function +CREATE OR REPLACE FUNCTION metabib.browse(search_field integer[], browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + pivot_id := metabib.browse_pivot(search_field, browse_term); + END IF; + + SELECT INTO pivot_sort_value, pivot_sort_fallback + sort_value, value FROM metabib.browse_entry WHERE id = pivot_id; + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. 
+ IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.sort_value <= ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.sort_value > ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. 
+ RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/rollback/kmain538.replace_metabib_reingest_metabib_field_entries-rollback.sql b/KCLS/sql/browse/rollback/kmain538.replace_metabib_reingest_metabib_field_entries-rollback.sql new file mode 100644 index 0000000000..2d51ad08fd --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain538.replace_metabib_reingest_metabib_field_entries-rollback.sql @@ -0,0 +1,104 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; diff --git a/KCLS/sql/browse/rollback/kmain553.All.SQL.Rollback b/KCLS/sql/browse/rollback/kmain553.All.SQL.Rollback new file mode 100644 index 0000000000..4737625a5a --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain553.All.SQL.Rollback @@ -0,0 +1,14 @@ +DROP TRIGGER metabib_browse_author_entry_fti_trigger ON metabib.browse_author_entry; +DROP TRIGGER metabib_browse_title_entry_fti_trigger ON metabib.browse_title_entry; +DROP TRIGGER metabib_browse_subject_entry_fti_trigger ON metabib.browse_subject_entry; +DROP TRIGGER metabib_browse_series_entry_fti_trigger ON metabib.browse_series_entry; + +DROP TABLE metabib.browse_author_entry_def_map; +DROP TABLE metabib.browse_title_entry_def_map; +DROP TABLE metabib.browse_subject_entry_def_map; +DROP TABLE metabib.browse_series_entry_def_map; + +DROP TABLE metabib.browse_author_entry; +DROP TABLE metabib.browse_title_entry; +DROP TABLE metabib.browse_subject_entry; +DROP TABLE metabib.browse_series_entry; \ No newline at end of file diff --git a/KCLS/sql/browse/rollback/kmain553.metabib.reingest_metabib_field_entries.SQL.Rollback b/KCLS/sql/browse/rollback/kmain553.metabib.reingest_metabib_field_entries.SQL.Rollback new file mode 100644 index 0000000000..4fcdaeb0ae --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain553.metabib.reingest_metabib_field_entries.SQL.Rollback @@ -0,0 +1,105 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, 
FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/rollback/kmain553.metabib.staged_browse_function.SQL.ROLLBACK b/KCLS/sql/browse/rollback/kmain553.metabib.staged_browse_function.SQL.ROLLBACK new file mode 100644 index 0000000000..0de7b30871 --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain553.metabib.staged_browse_function.SQL.ROLLBACK @@ -0,0 +1,207 @@ +-- Function: metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer) + +-- DROP FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer); + +CREATE OR REPLACE FUNCTION metabib.staged_browse(query text, fields integer[], context_org integer, context_locations integer[], staff boolean, browse_superpage_size integer, count_up_from_zero boolean, result_limit integer, next_pivot_pos integer) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_entry_def_map + WHERE entry = rec.id + AND 
def = ANY(fields); + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. + + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. 
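+            -- For instance (example figures only): called with result_limit = 5
+            -- and next_pivot_pos = 10, the first five visible entries
+            -- (row_counter 0-4) are returned as real result rows, entries 5-9
+            -- are scanned but not returned, and the entry at position 10 comes
+            -- back as a bare pivot_point row before we stop. If the cursor is
+            -- exhausted first, the NOT FOUND branch above returns the last
+            -- pivot_point seen.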
+ + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. + row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer) + OWNER TO evergreen; + diff --git a/KCLS/sql/browse/rollback/kmain553.metabib_browse_function.SQL.ROLLBACK b/KCLS/sql/browse/rollback/kmain553.metabib_browse_function.SQL.ROLLBACK new file mode 100644 index 0000000000..3f5e569f1e --- /dev/null +++ b/KCLS/sql/browse/rollback/kmain553.metabib_browse_function.SQL.ROLLBACK @@ -0,0 +1,133 @@ +-- browse_authority_refs_pivot -------------------------------------------------------------------- + +DROP FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text); +DROP FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text); +DROP FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text); +DROP FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text); + + +-- metabib.browse_bib_pivot(integer[], text) ------------------------------------------------------ + +DROP FUNCTION metabib.browse_author_bib_pivot(integer[], text); +DROP FUNCTION metabib.browse_title_bib_pivot(integer[], text); +DROP FUNCTION metabib.browse_subject_bib_pivot(integer[], text); +DROP FUNCTION metabib.browse_series_bib_pivot(integer[], text); + + +-- browse_pivot -------------------------------------------------------------------- + +DROP FUNCTION metabib.browse_author_pivot(search_field integer[], browse_term text); +DROP FUNCTION metabib.browse_title_pivot(search_field integer[], browse_term text); +DROP FUNCTION metabib.browse_subject_pivot(search_field integer[], browse_term text); +DROP FUNCTION metabib.browse_series_pivot(search_field integer[], browse_term text); + + +-- Function: metabib.browse(integer[], text, integer, integer, boolean, bigint, integer) + +DROP FUNCTION metabib.browse(integer[], text, integer, integer, boolean, bigint, integer); + + + +-- REMAKE OLD FUNCTIONS --------------------------------------------------------------------------- + + +-- Function: metabib.browse_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, 
mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_authority_refs_pivot(integer[], text) + OWNER TO evergreen; + + +-- Function: metabib.browse_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_bib_pivot(integer[], text) + OWNER TO evergreen; + + + +-- Function: metabib.browse_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + SELECT id FROM metabib.browse_entry + WHERE id IN ( + metabib.browse_bib_pivot($1, $2), + metabib.browse_authority_refs_pivot($1,$2) -- only look in 4xx, 5xx, 7xx of authority + ) + ORDER BY sort_value, value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_pivot(integer[], text) + OWNER TO evergreen; + + + +-- Function: metabib.browse(text, text, integer, integer, boolean, bigint, integer) + +-- DROP FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer); + +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +BEGIN + RETURN QUERY SELECT * FROM metabib.browse( + (SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class), + browse_term, + context_org, + context_loc_group, + staff, + pivot_id, + result_limit + ); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + + + + + + + diff --git a/KCLS/sql/export_process/rollback_scripts/001.public.export-generate-ids-rollback.sql b/KCLS/sql/export_process/rollback_scripts/001.public.export-generate-ids-rollback.sql new file mode 100644 index 0000000000..80640797f4 --- /dev/null +++ b/KCLS/sql/export_process/rollback_scripts/001.public.export-generate-ids-rollback.sql @@ -0,0 +1,4 @@ +-- Function: public.export_generate_ids() + +DROP FUNCTION public.export_generate_ids(start_date date, end_date date); + diff --git a/KCLS/sql/export_process/rollback_scripts/002.public.export-ids-001-rollback.sql b/KCLS/sql/export_process/rollback_scripts/002.public.export-ids-001-rollback.sql new file mode 100644 index 0000000000..bae5c229c3 --- /dev/null +++ b/KCLS/sql/export_process/rollback_scripts/002.public.export-ids-001-rollback.sql @@ -0,0 +1,4 @@ +-- Function: public.export_ids_001(bigint) + +DROP FUNCTION public.export_ids_001(bigint); + diff --git a/KCLS/sql/export_process/rollback_scripts/003.public.export-ids-998-rollback.sql b/KCLS/sql/export_process/rollback_scripts/003.public.export-ids-998-rollback.sql new file mode 100644 index 0000000000..24f1d533db --- /dev/null +++ b/KCLS/sql/export_process/rollback_scripts/003.public.export-ids-998-rollback.sql @@ -0,0 +1,5 @@ +-- Function: 
public.export_ids_998(bigint)
+
+DROP FUNCTION public.export_ids_998(bigint);
+
+
diff --git a/KCLS/sql/export_process/rollback_scripts/004.public.export-ids-has-copy-rollback.sql b/KCLS/sql/export_process/rollback_scripts/004.public.export-ids-has-copy-rollback.sql
new file mode 100644
index 0000000000..444d672f07
--- /dev/null
+++ b/KCLS/sql/export_process/rollback_scripts/004.public.export-ids-has-copy-rollback.sql
@@ -0,0 +1,4 @@
+-- Function: public.export_ids_has_copy(bigint)
+
+DROP FUNCTION public.export_ids_has_copy(bigint);
+
diff --git a/KCLS/sql/export_process/rollback_scripts/005.public.export-ids-ldr-rollback.sql b/KCLS/sql/export_process/rollback_scripts/005.public.export-ids-ldr-rollback.sql
new file mode 100644
index 0000000000..5fd67423da
--- /dev/null
+++ b/KCLS/sql/export_process/rollback_scripts/005.public.export-ids-ldr-rollback.sql
@@ -0,0 +1,4 @@
+-- Function: public.export_ids_LDR(bigint)
+
+DROP FUNCTION public.export_ids_LDR(bigint);
+
diff --git a/KCLS/sql/export_process/rollback_scripts/006.public.export-ids-cat-date-rollback.sql b/KCLS/sql/export_process/rollback_scripts/006.public.export-ids-cat-date-rollback.sql
new file mode 100644
index 0000000000..03ea471cd8
--- /dev/null
+++ b/KCLS/sql/export_process/rollback_scripts/006.public.export-ids-cat-date-rollback.sql
@@ -0,0 +1,4 @@
+-- Function: public.export_ids_cat_date(bigint, date, date)
+
+DROP FUNCTION public.export_ids_cat_date(bigint, date, date);
+
diff --git a/KCLS/sql/export_process/rollback_scripts/007.public.export-ids-086-092-099-rollback.sql b/KCLS/sql/export_process/rollback_scripts/007.public.export-ids-086-092-099-rollback.sql
new file mode 100644
index 0000000000..f8c80b1086
--- /dev/null
+++ b/KCLS/sql/export_process/rollback_scripts/007.public.export-ids-086-092-099-rollback.sql
@@ -0,0 +1,4 @@
+-- Function: public.export_ids_086_092_099(bigint)
+
+DROP FUNCTION public.export_ids_086_092_099(bigint);
+
diff --git a/KCLS/sql/export_process/rollback_scripts/runSh.sh b/KCLS/sql/export_process/rollback_scripts/runSh.sh
new file mode 100755
index 0000000000..1af116d026
--- /dev/null
+++ b/KCLS/sql/export_process/rollback_scripts/runSh.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+if [ "$1" = "-h" ]; then
+    HNAME=$2
+else
+    echo Please enter hostname with -h "hostname"
+    exit 1
+fi
+
+if [ "$3" = "-p" ]; then
+    PORT=$4
+else
+    echo Please enter port with -p "port"
+    exit 1
+fi
+
+echo hostname is $HNAME and port is $PORT '\n'
+date
+
+for file in *.sql
+do
+    echo '\n'
+    date
+    echo upgrading with $file
+    psql -U evergreen -h $HNAME -p $PORT -d evergreen -f $file
+done
+
+echo '\n'
+date
diff --git a/KCLS/sql/export_process/runSh.sh b/KCLS/sql/export_process/runSh.sh
new file mode 100755
index 0000000000..1af116d026
--- /dev/null
+++ b/KCLS/sql/export_process/runSh.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+if [ "$1" = "-h" ]; then
+    HNAME=$2
+else
+    echo Please enter hostname with -h "hostname"
+    exit 1
+fi
+
+if [ "$3" = "-p" ]; then
+    PORT=$4
+else
+    echo Please enter port with -p "port"
+    exit 1
+fi
+
+echo hostname is $HNAME and port is $PORT '\n'
+date
+
+for file in *.sql
+do
+    echo '\n'
+    date
+    echo upgrading with $file
+    psql -U evergreen -h $HNAME -p $PORT -d evergreen -f $file
+done
+
+echo '\n'
+date
diff --git a/KCLS/sql/kmain-1060/authority-indexing-ingest-or-delete.sql b/KCLS/sql/kmain-1060/authority-indexing-ingest-or-delete.sql
new file mode 100644
index 0000000000..0592461d35
--- /dev/null
+++ b/KCLS/sql/kmain-1060/authority-indexing-ingest-or-delete.sql
@@ -0,0 +1,152 @@
+-- Function: 
authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + field_id integer; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO 
metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1060/authority-simple-heading-set.sql b/KCLS/sql/kmain-1060/authority-simple-heading-set.sql new file mode 100644 index 0000000000..6a8e89e3e6 --- /dev/null +++ b/KCLS/sql/kmain-1060/authority-simple-heading-set.sql @@ -0,0 +1,83 @@ +-- Function: authority.simple_heading_set(text) + +-- DROP FUNCTION authority.simple_heading_set(text); + +CREATE OR REPLACE FUNCTION authority.simple_heading_set(marcxml text) + RETURNS SETOF authority.simple_heading AS +$BODY$ +DECLARE + res authority.simple_heading%ROWTYPE; + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + cset INT; + heading_text TEXT; + joiner_text TEXT; + sort_text TEXT; + tmp_text TEXT; + tmp_xml TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + --ver1.0 + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + res.record := auth_id; + + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset LOOP + + res.atag := acsaf.id; + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + joiner_text := COALESCE(acsaf.joiner, ' '); + + FOR tmp_xml IN SELECT UNNEST(XPATH('//*[@tag="'||tag_used||'"]', marcxml::XML)) LOOP + + heading_text := COALESCE( + oils_xpath_string('./*[contains("'||acsaf.display_sf_list||'",@code)]', tmp_xml::TEXT, joiner_text), + '' + ); + IF nfi_used 
IS NOT NULL THEN + sort_text := SUBSTRING( + heading_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tmp_xml::TEXT), + $$\D+$$, + '', + 'g' + ), + '' + )::INT, + 0 + ) + 1 + ); + ELSE + sort_text := heading_text; + END IF; + IF heading_text IS NOT NULL AND heading_text <> '' THEN + res.value := heading_text; + res.sort_value := public.naco_normalize(sort_text); + res.index_vector = to_tsvector('keyword'::regconfig, res.sort_value); + RETURN NEXT res; + END IF; + END LOOP; + END LOOP; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql IMMUTABLE + COST 100 + ROWS 1000; +ALTER FUNCTION authority.simple_heading_set(text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1060/metabib-reingest-metabib-field-entries.sql b/KCLS/sql/kmain-1060/metabib-reingest-metabib-field-entries.sql new file mode 100644 index 0000000000..26a8a706eb --- /dev/null +++ b/KCLS/sql/kmain-1060/metabib-reingest-metabib-field-entries.sql @@ -0,0 +1,195 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.0 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- 
old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + IF char_length(value_prepped) > 0 THEN + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + END IF; + + IF 
ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO public; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO bbonner; diff --git a/KCLS/sql/kmain-1060/rollback_scripts/authority-indexing-ingest-or-delete-rollback.sql b/KCLS/sql/kmain-1060/rollback_scripts/authority-indexing-ingest-or-delete-rollback.sql new file mode 100644 index 0000000000..61f2bb764b --- /dev/null +++ b/KCLS/sql/kmain-1060/rollback_scripts/authority-indexing-ingest-or-delete-rollback.sql @@ -0,0 +1,155 @@ +-- Function: authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashps authority.simple_heading_plus%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + value_prepped TEXT; + field_id integer; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashps IN SELECT * FROM authority.simple_heading_plus_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashps.atag; + + value_prepped := metabib.browse_normalize(ashps.original_text, field_id); + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashps.record, ashps.atag, value_prepped, ashps.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO 
metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1060/rollback_scripts/authority-simple-heading-set-rollback.sql b/KCLS/sql/kmain-1060/rollback_scripts/authority-simple-heading-set-rollback.sql new file mode 100644 index 0000000000..8a095e9a53 --- /dev/null +++ b/KCLS/sql/kmain-1060/rollback_scripts/authority-simple-heading-set-rollback.sql @@ -0,0 +1,83 @@ +-- Function: authority.simple_heading_set(text) + +-- DROP FUNCTION authority.simple_heading_set(text); + +CREATE OR REPLACE FUNCTION authority.simple_heading_set(marcxml text) + RETURNS SETOF authority.simple_heading AS +$BODY$ +DECLARE + res authority.simple_heading%ROWTYPE; + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + cset INT; + heading_text TEXT; + joiner_text TEXT; + sort_text TEXT; + tmp_text TEXT; + tmp_xml TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + --ver1.0 + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + res.record := auth_id; + + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset LOOP + + res.atag := acsaf.id; + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + joiner_text := COALESCE(acsaf.joiner, ' '); + + FOR tmp_xml IN SELECT UNNEST(XPATH('//*[@tag="'||tag_used||'"]', marcxml::XML)) LOOP + + heading_text := COALESCE( + oils_xpath_string('./*[contains("'||acsaf.sf_list||'",@code)]', tmp_xml::TEXT, joiner_text), + '' + ); + IF nfi_used IS NOT NULL THEN + sort_text := SUBSTRING( + heading_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tmp_xml::TEXT), + $$\D+$$, + '', + 'g' + ), + '' + )::INT, + 0 + ) + 1 + ); + ELSE + sort_text := heading_text; + END IF; + IF heading_text IS NOT NULL AND heading_text <> '' THEN + res.value := heading_text; + res.sort_value := public.naco_normalize(sort_text); + res.index_vector = to_tsvector('keyword'::regconfig, res.sort_value); + RETURN NEXT res; + END IF; + END LOOP; + END LOOP; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql IMMUTABLE + COST 100 + ROWS 1000; +ALTER FUNCTION authority.simple_heading_set(text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1060/rollback_scripts/metabib-reingest-metabib-field-entries-rollback.sql b/KCLS/sql/kmain-1060/rollback_scripts/metabib-reingest-metabib-field-entries-rollback.sql new file mode 100644 index 0000000000..5be9d51a7d --- /dev/null +++ 
b/KCLS/sql/kmain-1060/rollback_scripts/metabib-reingest-metabib-field-entries-rollback.sql @@ -0,0 +1,195 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.0 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
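+            -- The CASE below writes to a per-class browse table
+            -- (metabib.browse_<class>_entry for author, title, subject, series
+            -- and call_number) rather than the single metabib.browse_entry, and
+            -- truncated_sort_value keeps only the first 2700 characters of the
+            -- sort key, presumably so indexed values stay below PostgreSQL's
+            -- btree ceiling of roughly a third of a page.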
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + IF char_length(value_prepped) > 0 THEN + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class ||
+                '_field_entry WHERE field = $1 AND source = $2 AND value = $3'
+                INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value;
+            -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id;
+            IF mbe_id IS NULL THEN
+                EXECUTE $$
+                INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value)
+                    VALUES ($$ ||
+                        quote_literal(ind_data.field) || $$, $$ ||
+                        quote_literal(ind_data.source) || $$, $$ ||
+                        quote_literal(ind_data.value) ||
+                    $$);$$;
+            END IF;
+        END IF;
+
+    END LOOP;
+
+    IF NOT b_skip_search THEN
+        PERFORM metabib.update_combined_index_vectors(bib_id);
+    END IF;
+
+    RETURN;
+END;
+$BODY$
+  LANGUAGE plpgsql VOLATILE
+  COST 100;
+ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean)
+  OWNER TO evergreen;
+GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO public;
+GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO evergreen;
+GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO bbonner;
diff --git a/KCLS/sql/kmain-1074/kmain-1074-indexing_ingest_or_delete.sql b/KCLS/sql/kmain-1074/kmain-1074-indexing_ingest_or_delete.sql
new file mode 100644
index 0000000000..ea4163d1ac
--- /dev/null
+++ b/KCLS/sql/kmain-1074/kmain-1074-indexing_ingest_or_delete.sql
@@ -0,0 +1,168 @@
+-- Function: authority.indexing_ingest_or_delete()
+
+-- DROP FUNCTION authority.indexing_ingest_or_delete();
+
+CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete()
+  RETURNS trigger AS
+$BODY$
+DECLARE
+    ashs authority.simple_heading%ROWTYPE;
+    mbe_row metabib.browse_entry%ROWTYPE;
+    mbe_id BIGINT;
+    ash_id BIGINT;
+    search_class text;
+    field_id integer;
+    b_marc RECORD; -- loop variables for the subfield cleanup below
+    t_marc RECORD;
+BEGIN
+    --ver1.2 updated with kmain-821
+    IF NEW.deleted IS TRUE THEN -- If this authority is deleted
+
+        -- Remove the actual linking subfields present in
+        -- marc bib records that are controlled by this one
+        FOR b_marc IN SELECT * FROM biblio.record_entry
+            WHERE id IN (SELECT bib FROM authority.bib_linking WHERE authority = NEW.id) LOOP
+            UPDATE biblio.record_entry
+            SET marc = (SELECT regexp_replace(b_marc.marc,E'<subfield[^>]*?code="0">\\([A-Z]+\\)' || NEW.id || '</subfield>','','g'))
+            WHERE id = b_marc.id;
+        END LOOP;
+        DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible
+        DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records
+        DELETE FROM authority.simple_heading WHERE record = NEW.id;
+        -- Remove the actual linking subfields present in
+        -- authority records that target this one
+        FOR t_marc IN SELECT * FROM authority.record_entry
+            WHERE id IN (SELECT source FROM authority.authority_linking WHERE target = NEW.id) LOOP
+            UPDATE authority.record_entry
+            SET marc = (SELECT regexp_replace(t_marc.marc,E'<subfield[^>]*?code="0">\\([A-Z]+\\)' || NEW.id || '</subfield>','','g'))
+            WHERE id = t_marc.id;
+        END LOOP;
+        DELETE FROM authority.authority_linking
+            WHERE source = NEW.id OR target = NEW.id;
+
+        RETURN NEW; -- and we're done
+    END IF;
+
+    IF TG_OP = 'UPDATE' THEN -- re-ingest? 
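+        -- On UPDATE we bail out early when the MARC is byte-for-byte unchanged,
+        -- unless the ingest.reingest.force_on_same_marc flag is enabled;
+        -- otherwise the old simple headings and authority links are cleared so
+        -- the INSERT and loop below can rebuild them from the new record.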
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES 
(mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-1074/kmain-1074-rollback.sql b/KCLS/sql/kmain-1074/kmain-1074-rollback.sql new file mode 100644 index 0000000000..ac497680f0 --- /dev/null +++ b/KCLS/sql/kmain-1074/kmain-1074-rollback.sql @@ -0,0 +1,153 @@ +-- Function: authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + field_id integer; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES 
(mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-1102/kmain-1102-authority-reingest-rollback.sql b/KCLS/sql/kmain-1102/kmain-1102-authority-reingest-rollback.sql new file mode 100644 index 0000000000..0592461d35 --- /dev/null +++ b/KCLS/sql/kmain-1102/kmain-1102-authority-reingest-rollback.sql @@ -0,0 +1,152 @@ +-- Function: authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + field_id integer; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES 
(mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1102/kmain-1102-authority-reingest.sql b/KCLS/sql/kmain-1102/kmain-1102-authority-reingest.sql new file mode 100644 index 0000000000..c02513cd0c --- /dev/null +++ b/KCLS/sql/kmain-1102/kmain-1102-authority-reingest.sql @@ -0,0 +1,167 @@ +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + field_id integer; + are_row authority.record_entry%ROWTYPE; + bre_row biblio.record_entry%ROWTYPE; +BEGIN + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + -- Remove the actual linking subfields present in + -- marc bib records that is controlled by this one + FOR bre_row IN SELECT * FROM biblio.record_entry bre + INNER JOIN authority.bib_linking abl + ON bre.id = abl.bib AND abl.authority = NEW.id LOOP + + UPDATE biblio.record_entry + SET marc = (SELECT regexp_replace(bre_row.marc,E']*?code="0">\\([A-Z]+\\)' || NEW.id || '','','g')) + WHERE id = bre_row.id; + + END LOOP; + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Remove the actual linking subfields present in + -- authority records that target this one + FOR are_row IN SELECT * FROM authority.record_entry auth + INNER JOIN authority.authority_linking aal + ON auth.id = aal.source AND aal.target = NEW.id LOOP + + UPDATE authority.record_entry + SET marc = (SELECT regexp_replace(are_row.marc,E']*?code="0">\\([A-Z]+\\)' || NEW.id || '','','g')) + WHERE id = are_row.id; + + END LOOP; + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + 
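-- Illustrative comparison (a sketch, not taken verbatim from either file): the key
-- difference between this kmain-1102 version and the rollback version above is the
-- browse-entry lookup inside each WHEN branch. Here an existing entry is matched on
-- the normalized sort form alone, roughly
--   SELECT * FROM metabib.browse_author_entry WHERE sort_value = ashs.sort_value;
-- whereas the rollback matches on both columns,
--   SELECT * FROM metabib.browse_author_entry
--    WHERE value = ashs.value AND sort_value = ashs.sort_value;
-- so with this version headings that normalize to the same sort_value share a single
-- browse entry even when their display values differ.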
END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; \ No newline at end of file diff --git a/KCLS/sql/kmain-1102/kmain-1102-metabib-reingest-rollback.sql b/KCLS/sql/kmain-1102/kmain-1102-metabib-reingest-rollback.sql new file mode 100644 index 0000000000..26a8a706eb --- /dev/null +++ b/KCLS/sql/kmain-1102/kmain-1102-metabib-reingest-rollback.sql @@ -0,0 +1,195 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.0 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean 
specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + IF char_length(value_prepped) > 0 THEN + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + 
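-- Illustrative expansion (a sketch; the table name comes from ind_data.field_class and
-- the literal values are placeholders): for an author row, the dynamic statements below
-- amount to roughly
--   SELECT 1 FROM metabib.author_field_entry
--    WHERE field = $1 AND source = $2 AND value = $3;
-- and, when that probe returns NULL,
--   INSERT INTO metabib.author_field_entry (field, source, value)
--   VALUES ('806', '123456', 'Some heading');
-- with the literals produced by quote_literal().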
EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO public; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO bbonner; diff --git a/KCLS/sql/kmain-1102/kmain-1102-metabib-reingest.sql b/KCLS/sql/kmain-1102/kmain-1102-metabib-reingest.sql new file mode 100644 index 0000000000..51ac2e8b69 --- /dev/null +++ b/KCLS/sql/kmain-1102/kmain-1102-metabib-reingest.sql @@ -0,0 +1,190 @@ +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.4 modified by kmain-1060 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; 
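-- Descriptive aside: biblio.extract_metabib_field_entry, reproduced later in this patch
-- set, emits facet rows with field set to -1 * idx.id, so the sign is flipped back here
-- before the id is used in the facet_entry insert and the browse *_def_map inserts below.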
+ END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + IF char_length(value_prepped) > 0 THEN + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO 
metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO public; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO evergreen; \ No newline at end of file diff --git a/KCLS/sql/kmain-1128/authority-get_5xx_heading.sql b/KCLS/sql/kmain-1128/authority-get_5xx_heading.sql new file mode 100644 index 0000000000..1759e50d88 --- /dev/null +++ b/KCLS/sql/kmain-1128/authority-get_5xx_heading.sql @@ -0,0 +1,34 @@ +CREATE OR REPLACE FUNCTION authority.get_5xx_heading(w_subfield text) + RETURNS text AS +$BODY$ +-- Function takes a w subfield as input. It returns the heading to be displayed for the +-- reference. The headings are stored in authority.display_5xx_headings. +-- The current version only looks at the first position (0) of the w subfield. It then looks +-- in authority.display_5xx_headings for a match and returns that heading. If no match is found +-- it selects from authority.display_5xx_headings where w subfield position 0 is null, which is +-- the default 5xx heading. (See Also) +-- +-- The table is built to be expandable for the future and this function could be modified to check +-- more positions. 
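-- Illustrative usage (a sketch based on the seed rows added to
-- authority.display_5xx_headings later in this patch; only position 0 of $w is examined):
--   SELECT authority.get_5xx_heading('a');    -- 'Earlier Heading'
--   SELECT authority.get_5xx_heading('b');    -- 'Later Heading'
--   SELECT authority.get_5xx_heading('nnaa'); -- no row for 'n', falls back to 'See Also'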
+DECLARE + subfield_0 text; + return_heading text; +BEGIN + SELECT substring(w_subfield FROM '.') INTO subfield_0; + + SELECT heading INTO return_heading + FROM authority.display_5xx_headings + WHERE w_subfield_0 = subfield_0; + + IF return_heading IS NULL THEN + SELECT heading INTO return_heading + FROM authority.display_5xx_headings + WHERE w_subfield_0 IS NULL; + END IF; + + RETURN return_heading; +END; +$BODY$ + LANGUAGE plpgsql STABLE; +ALTER FUNCTION authority.get_5xx_heading(w_subfield text) + OWNER TO evergreen; \ No newline at end of file diff --git a/KCLS/sql/kmain-1128/create_and_populate-authority-display_5xx_headings.sql b/KCLS/sql/kmain-1128/create_and_populate-authority-display_5xx_headings.sql new file mode 100644 index 0000000000..c2ac2ed598 --- /dev/null +++ b/KCLS/sql/kmain-1128/create_and_populate-authority-display_5xx_headings.sql @@ -0,0 +1,31 @@ +-- Create authority.display_5xx_headings table +CREATE TABLE authority.display_5xx_headings ( + id serial NOT NULL, + w_subfield_0 character(1), + w_subfield_1 character(1), + w_subfield_2 character(1), + w_subfield_3 character(1), + heading text NOT NULL, + CONSTRAINT reference_headings_pkey PRIMARY KEY (id) +) +WITH ( + OIDS=FALSE +); +ALTER TABLE authority.display_5xx_headings + OWNER TO evergreen; + +-- Add data to authority.display_5xx_headings +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES (NULL,NULL,NULL,NULL,'See Also'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('a',NULL,NULL,NULL,'Earlier Heading'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('b',NULL,NULL,NULL,'Later Heading'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('g',NULL,NULL,NULL,'Narrower Heading'); + +INSERT INTO authority.display_5xx_headings(w_subfield_0, w_subfield_1, w_subfield_2, w_subfield_3, heading) +VALUES ('r',NULL,NULL,NULL,'See Also'); \ No newline at end of file diff --git a/KCLS/sql/kmain-1128/metabib-get_browse_author_entry_marc_record.sql b/KCLS/sql/kmain-1128/metabib-get_browse_author_entry_marc_record.sql new file mode 100644 index 0000000000..dc834fdffa --- /dev/null +++ b/KCLS/sql/kmain-1128/metabib-get_browse_author_entry_marc_record.sql @@ -0,0 +1,27 @@ +-- Function: metabib.get_browse_author_entry_marc_record(bigint) + +-- DROP FUNCTION metabib.get_browse_author_entry_marc_record(bigint); + +CREATE OR REPLACE FUNCTION metabib.get_browse_author_entry_marc_record(browse_entry bigint) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse author entry table and returns the marc for the +-- authority that control the browse author entry. 
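-- Illustrative usage (a sketch; 42 stands in for a real metabib.browse_author_entry id):
--   SELECT metabib.get_browse_author_entry_marc_record(42);
-- returns the MARCXML of the authority record whose 1XX heading (acsaf.tag ILIKE '1__')
-- is mapped to that browse entry through authority.simple_heading and
-- metabib.browse_author_entry_simple_heading_map.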
+DECLARE + marc text; +BEGIN + SELECT are.marc INTO marc + FROM authority.record_entry are + JOIN authority.simple_heading ash ON are.id = ash.record + JOIN metabib.browse_author_entry_simple_heading_map mbaeshm ON ash.id = mbaeshm.simple_heading + JOIN metabib.browse_author_entry mbae ON mbaeshm.entry = mbae.id + JOIN authority.control_set_authority_field acsaf ON ash.atag = acsaf.id + WHERE mbae.id = browse_entry AND acsaf.tag ILIKE '1__'; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_author_entry_marc_record(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1128/metabib-get_browse_entry_marc_record.sql b/KCLS/sql/kmain-1128/metabib-get_browse_entry_marc_record.sql new file mode 100644 index 0000000000..c8661134d8 --- /dev/null +++ b/KCLS/sql/kmain-1128/metabib-get_browse_entry_marc_record.sql @@ -0,0 +1,22 @@ +CREATE OR REPLACE FUNCTION metabib.get_browse_entry_marc_record(browse_entry bigint, search_class text) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse entry series of tables and a type to indicate what +-- table to look into. It will then fetch the marc record for that the authority that +-- controls the browse entry. +DECLARE + marc text; +BEGIN + CASE search_class + WHEN 'author' THEN marc = metabib.get_browse_author_entry_marc_record(browse_entry); + WHEN 'subject' THEN marc = metabib.get_browse_subject_entry_marc_record(browse_entry); + WHEN 'series' THEN marc = metabib.get_browse_series_entry_marc_record(browse_entry); + END CASE; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_entry_marc_record(bigint, text) + OWNER TO evergreen; \ No newline at end of file diff --git a/KCLS/sql/kmain-1128/metabib-get_browse_series_entry_marc_record.sql b/KCLS/sql/kmain-1128/metabib-get_browse_series_entry_marc_record.sql new file mode 100644 index 0000000000..dded605f7a --- /dev/null +++ b/KCLS/sql/kmain-1128/metabib-get_browse_series_entry_marc_record.sql @@ -0,0 +1,27 @@ +-- Function: metabib.get_browse_series_entry_marc_record(bigint) + +-- DROP FUNCTION metabib.get_browse_series_entry_marc_record(bigint); + +CREATE OR REPLACE FUNCTION metabib.get_browse_series_entry_marc_record(browse_entry bigint) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse series entry table and returns the marc for the +-- authority that control the browse series entry. 
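-- Illustrative dispatch (a sketch; 42 is a placeholder id): the per-class helpers in this
-- set are normally reached through the wrapper defined above, for example
--   SELECT metabib.get_browse_entry_marc_record(42, 'series');
-- which routes to metabib.get_browse_series_entry_marc_record below; only 'author',
-- 'subject' and 'series' are handled by its CASE.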
+DECLARE + marc text; +BEGIN + SELECT are.marc INTO marc + FROM authority.record_entry are + JOIN authority.simple_heading ash ON are.id = ash.record + JOIN metabib.browse_series_entry_simple_heading_map mbseshm ON ash.id = mbseshm.simple_heading + JOIN metabib.browse_series_entry mbse ON mbseshm.entry = mbse.id + JOIN authority.control_set_authority_field acsaf ON ash.atag = acsaf.id + WHERE mbse.id = browse_entry AND acsaf.tag ILIKE '1__'; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_series_entry_marc_record(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1128/metabib-get_browse_subject_entry_marc_record.sql b/KCLS/sql/kmain-1128/metabib-get_browse_subject_entry_marc_record.sql new file mode 100644 index 0000000000..4fb8a78919 --- /dev/null +++ b/KCLS/sql/kmain-1128/metabib-get_browse_subject_entry_marc_record.sql @@ -0,0 +1,27 @@ +-- Function: metabib.get_browse_subject_entry_marc_record(bigint) + +-- DROP FUNCTION metabib.get_browse_subject_entry_marc_record(bigint); + +CREATE OR REPLACE FUNCTION metabib.get_browse_subject_entry_marc_record(browse_entry bigint) + RETURNS text AS +$BODY$ +-- Function takes an id to the browse subject entry table and returns the marc for the +-- authority that control the browse subject entry. +DECLARE + marc text; +BEGIN + SELECT are.marc INTO marc + FROM authority.record_entry are + JOIN authority.simple_heading ash ON are.id = ash.record + JOIN metabib.browse_subject_entry_simple_heading_map mbseshm ON ash.id = mbseshm.simple_heading + JOIN metabib.browse_subject_entry mbse ON mbseshm.entry = mbse.id + JOIN authority.control_set_authority_field acsaf ON ash.atag = acsaf.id + WHERE mbse.id = browse_entry AND acsaf.tag ILIKE '1__'; + + RETURN marc; +END; +$BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION metabib.get_browse_subject_entry_marc_record(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1128/rollback_scripts/authority-get_5xx_heading-rollback.sql b/KCLS/sql/kmain-1128/rollback_scripts/authority-get_5xx_heading-rollback.sql new file mode 100644 index 0000000000..715ce344f9 --- /dev/null +++ b/KCLS/sql/kmain-1128/rollback_scripts/authority-get_5xx_heading-rollback.sql @@ -0,0 +1,4 @@ +-- Function: authority.get_5xx_heading(bigint) +DROP FUNCTION authority.get_5xx_heading(bigint); + + diff --git a/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_author_entry_marc_record-rollback.sql b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_author_entry_marc_record-rollback.sql new file mode 100644 index 0000000000..956e59ee37 --- /dev/null +++ b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_author_entry_marc_record-rollback.sql @@ -0,0 +1,4 @@ +-- Function: metabib.get_browse_author_entry_marc_record(bigint) +DROP FUNCTION metabib.get_browse_author_entry_marc_record(bigint); + + diff --git a/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_entry_marc_record-rollback.sql b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_entry_marc_record-rollback.sql new file mode 100644 index 0000000000..a4cd384cd9 --- /dev/null +++ b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_entry_marc_record-rollback.sql @@ -0,0 +1,4 @@ +-- Function: metabib.get_browse_entry_marc_record(bigint) +DROP FUNCTION metabib.get_browse_entry_marc_record(bigint); + + diff --git a/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_series_entry_marc_record-rollback.sql 
b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_series_entry_marc_record-rollback.sql new file mode 100644 index 0000000000..ae1425217c --- /dev/null +++ b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_series_entry_marc_record-rollback.sql @@ -0,0 +1,4 @@ +-- Function: metabib.get_browse_series_entry_marc_record(bigint) +DROP FUNCTION metabib.get_browse_series_entry_marc_record(bigint); + + diff --git a/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_subject_entry_marc_record-rollback.sql b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_subject_entry_marc_record-rollback.sql new file mode 100644 index 0000000000..24a724ef82 --- /dev/null +++ b/KCLS/sql/kmain-1128/rollback_scripts/metabib-get_browse_subject_entry_marc_record-rollback.sql @@ -0,0 +1,4 @@ +-- Function: metabib.get_browse_subject_entry_marc_record(bigint) +DROP FUNCTION metabib.get_browse_subject_entry_marc_record(bigint); + + diff --git a/KCLS/sql/kmain-1199/kmain-1199-rollback.sql b/KCLS/sql/kmain-1199/kmain-1199-rollback.sql new file mode 100644 index 0000000000..056d3027d7 --- /dev/null +++ b/KCLS/sql/kmain-1199/kmain-1199-rollback.sql @@ -0,0 +1,55 @@ +BEGIN; + +--Remove sort_value column from authority.full_rec +ALTER TABLE authority.full_rec + DROP COLUMN sort_value; + +-- Function: authority.flatten_marc(bigint) +-- DROP FUNCTION authority.flatten_marc(bigint); +CREATE OR REPLACE FUNCTION authority.flatten_marc(rid bigint) + RETURNS SETOF authority.full_rec AS +$BODY$ +DECLARE + auth authority.record_entry%ROWTYPE; + output authority.full_rec%ROWTYPE; + field RECORD; +BEGIN + SELECT INTO auth * FROM authority.record_entry WHERE id = rid; + + FOR field IN SELECT * FROM vandelay.flatten_marc( auth.marc ) LOOP + output.record := rid; + output.ind1 := field.ind1; + output.ind2 := field.ind2; + output.tag := field.tag; + output.subfield := field.subfield; + output.value := field.value; + + RETURN NEXT output; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION authority.flatten_marc(bigint) + OWNER TO evergreen; + +-- Function: authority.reingest_authority_full_rec(bigint) +-- DROP FUNCTION authority.reingest_authority_full_rec(bigint); +CREATE OR REPLACE FUNCTION authority.reingest_authority_full_rec(auth_id bigint) + RETURNS void AS +$BODY$ +BEGIN + DELETE FROM authority.full_rec WHERE record = auth_id; + INSERT INTO authority.full_rec (record, tag, ind1, ind2, subfield, value) + SELECT record, tag, ind1, ind2, subfield, value FROM authority.flatten_marc( auth_id ); + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.reingest_authority_full_rec(bigint) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/kmain-1199/kmain-1199.sql b/KCLS/sql/kmain-1199/kmain-1199.sql new file mode 100644 index 0000000000..2657cd12c7 --- /dev/null +++ b/KCLS/sql/kmain-1199/kmain-1199.sql @@ -0,0 +1,56 @@ +BEGIN; + +--add sort_value column to authority.full_rec +ALTER TABLE authority.full_rec + ADD COLUMN sort_value text; + +-- Function: authority.flatten_marc(bigint) +-- DROP FUNCTION authority.flatten_marc(bigint); +CREATE OR REPLACE FUNCTION authority.flatten_marc(rid bigint) + RETURNS SETOF authority.full_rec AS +$BODY$ +DECLARE + auth authority.record_entry%ROWTYPE; + output authority.full_rec%ROWTYPE; + field RECORD; +BEGIN + SELECT INTO auth * FROM authority.record_entry WHERE id = rid; + + FOR field IN SELECT * FROM vandelay.flatten_marc( auth.marc ) LOOP + output.record := rid; + output.ind1 := field.ind1; + 
output.ind2 := field.ind2; + output.tag := field.tag; + output.subfield := field.subfield; + output.value := field.value; + output.sort_value := naco_normalize(field.value); + + RETURN NEXT output; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION authority.flatten_marc(bigint) + OWNER TO evergreen; + +-- Function: authority.reingest_authority_full_rec(bigint) +-- DROP FUNCTION authority.reingest_authority_full_rec(bigint); +CREATE OR REPLACE FUNCTION authority.reingest_authority_full_rec(auth_id bigint) + RETURNS void AS +$BODY$ +BEGIN + DELETE FROM authority.full_rec WHERE record = auth_id; + INSERT INTO authority.full_rec (record, tag, ind1, ind2, subfield, value, sort_value) + SELECT record, tag, ind1, ind2, subfield, value, sort_value FROM authority.flatten_marc( auth_id ); + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.reingest_authority_full_rec(bigint) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/kmain-1204/update-authority-control-set-bib-field-metabib-field-map-rollback.sql b/KCLS/sql/kmain-1204/update-authority-control-set-bib-field-metabib-field-map-rollback.sql new file mode 100644 index 0000000000..5b8bb319c6 --- /dev/null +++ b/KCLS/sql/kmain-1204/update-authority-control-set-bib-field-metabib-field-map-rollback.sql @@ -0,0 +1,2 @@ +UPDATE authority.control_set_bib_field_metabib_field_map SET metabib_field = 5 +WHERE metabib_field = 1 diff --git a/KCLS/sql/kmain-1204/update-authority-control-set-bib-field-metabib-field-map.sql b/KCLS/sql/kmain-1204/update-authority-control-set-bib-field-metabib-field-map.sql new file mode 100644 index 0000000000..13c39dced4 --- /dev/null +++ b/KCLS/sql/kmain-1204/update-authority-control-set-bib-field-metabib-field-map.sql @@ -0,0 +1,2 @@ +UPDATE authority.control_set_bib_field_metabib_field_map SET metabib_field = 1 +WHERE metabib_field = 5; diff --git a/KCLS/sql/kmain-1264/kmain-1264-add-export-and-import-biblio-record-entry-rollback.sql b/KCLS/sql/kmain-1264/kmain-1264-add-export-and-import-biblio-record-entry-rollback.sql new file mode 100644 index 0000000000..ead9667b9f --- /dev/null +++ b/KCLS/sql/kmain-1264/kmain-1264-add-export-and-import-biblio-record-entry-rollback.sql @@ -0,0 +1,33 @@ +-- Rollback KMAIN-1264 + +ALTER TABLE biblio.record_entry +DROP COLUMN export_date; + +ALTER TABLE biblio.record_entry +DROP COLUMN import_date; + +ALTER TABLE auditor.biblio_record_entry_history +DROP COLUMN export_date; + +ALTER TABLE auditor.biblio_record_entry_history +DROP COLUMN import_date; + +CREATE OR REPLACE FUNCTION auditor.audit_biblio_record_entry_func() + RETURNS trigger AS +$BODY$ + BEGIN + INSERT INTO auditor.biblio_record_entry_history ( audit_id, audit_time, audit_action, audit_user, audit_ws, id, creator, editor, source, quality, create_date, edit_date, active, deleted, fingerprint, tcn_source, tcn_value, marc, last_xact_id, owner, share_depth, cataloging_date ) + SELECT nextval('auditor.biblio_record_entry_pkey_seq'), + now(), + SUBSTR(TG_OP,1,1), + eg_user, + eg_ws, + OLD.id, OLD.creator, OLD.editor, OLD.source, OLD.quality, OLD.create_date, OLD.edit_date, OLD.active, OLD.deleted, OLD.fingerprint, OLD.tcn_source, OLD.tcn_value, OLD.marc, OLD.last_xact_id, OLD.owner, OLD.share_depth, OLD.cataloging_date + FROM auditor.get_audit_info(); + RETURN NULL; + END; + $BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION auditor.audit_biblio_record_entry_func() + OWNER TO evergreen; diff --git 
a/KCLS/sql/kmain-1264/kmain-1264-add-export-and-import-biblio-record-entry.sql b/KCLS/sql/kmain-1264/kmain-1264-add-export-and-import-biblio-record-entry.sql new file mode 100644 index 0000000000..72c87707ba --- /dev/null +++ b/KCLS/sql/kmain-1264/kmain-1264-add-export-and-import-biblio-record-entry.sql @@ -0,0 +1,38 @@ +-- Add export and import date to biblio.record_entry. +-- Modify auditor.biblio_record_entry_history to accomidate this. + +ALTER TABLE biblio.record_entry +ADD COLUMN export_date timestamp with time zone; + +ALTER TABLE biblio.record_entry +ADD COLUMN import_date timestamp with time zone; + +ALTER TABLE auditor.biblio_record_entry_history +ADD COLUMN export_date timestamp with time zone; + +ALTER TABLE auditor.biblio_record_entry_history +ADD COLUMN import_date timestamp with time zone; + +-- Function: auditor.audit_biblio_record_entry_func() + +-- DROP FUNCTION auditor.audit_biblio_record_entry_func(); + +CREATE OR REPLACE FUNCTION auditor.audit_biblio_record_entry_func() + RETURNS trigger AS +$BODY$ + BEGIN + INSERT INTO auditor.biblio_record_entry_history ( audit_id, audit_time, audit_action, audit_user, audit_ws, id, creator, editor, source, quality, create_date, edit_date, active, deleted, fingerprint, tcn_source, tcn_value, marc, last_xact_id, owner, share_depth, cataloging_date, export_date, import_date ) + SELECT nextval('auditor.biblio_record_entry_pkey_seq'), + now(), + SUBSTR(TG_OP,1,1), + eg_user, + eg_ws, + OLD.id, OLD.creator, OLD.editor, OLD.source, OLD.quality, OLD.create_date, OLD.edit_date, OLD.active, OLD.deleted, OLD.fingerprint, OLD.tcn_source, OLD.tcn_value, OLD.marc, OLD.last_xact_id, OLD.owner, OLD.share_depth, OLD.cataloging_date, OLD.export_date, OLD.import_date + FROM auditor.get_audit_info(); + RETURN NULL; + END; + $BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION auditor.audit_biblio_record_entry_func() + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1271/kmain-1271-case-sensitive-browse-call-number-searches-rollback.sql b/KCLS/sql/kmain-1271/kmain-1271-case-sensitive-browse-call-number-searches-rollback.sql new file mode 100644 index 0000000000..344a77e9cc --- /dev/null +++ b/KCLS/sql/kmain-1271/kmain-1271-case-sensitive-browse-call-number-searches-rollback.sql @@ -0,0 +1,308 @@ +-- Function: biblio.extract_metabib_field_entry(bigint, text) + +-- DROP FUNCTION biblio.extract_metabib_field_entry(bigint, text); + +CREATE OR REPLACE FUNCTION +biblio.extract_metabib_field_entry(rid bigint, default_joiner text) + RETURNS SETOF metabib.field_entry_template AS +$BODY$ +DECLARE + bib biblio.record_entry +%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node +TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT +:= default_joiner; -- XXX will index defs supply a joiner? 
+ authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template +%ROWTYPE; +BEGIN + --ver2.0 - KMAIN-1162 + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + +output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR +idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from +config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name +THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := +perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM +unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". Data ise returned from oils_xpath (above) in UTF- +8, not entity encoded + REGEXP_REPLACE( -- This escapes embeded [^<]+)(<)([^>]+<)$re$, + E'\\1<\\3', + 'g' + ), + + '&(?!amp;)', + '&', + 'g' + ), + E'\\s+', + + ' ', + 'g' + ) + ), ' '), ''), + joiner + ); + + CONTINUE WHEN +curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + +raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF +idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY +[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF +idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = +idx.id; + output_row.source = rid; + -- outer regexp_replace keeps all '.' expect the last one. 
+ -- inner +regexp_replace removes all connecting whitespace and replaces it with a single space + output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE +(browse_text, E'\\s+', ' ', 'g'), E'\\.$', '', 'g')); + output_row.sort_value := + public.naco_normalize(sort_value); + + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := +oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, +xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + +IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry +WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END +IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + -- Returning browse rows with search_field += true for search+browse + -- configs allows us to retain granularity of being able to search + -- browse fields with "starts with" +type operators (for example, for + -- titles of songs in music albums) + IF idx.search_field THEN + +output_row.search_field = TRUE; + END IF; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + +output_row.search_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF +idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( +idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + + output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g'), E'\\.+', '', 'g')); + + output_row.facet_field = +TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text +IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + + IF idx.field_class = 'identifier' AND +idx.name = 'bibcn' THEN + output_row.field_class = 'call_number'; + output_row.browse_field = TRUE; + +output_row.sort_value = raw_text; + output_row.value = raw_text; + ELSE + output_row.field_class = idx.field_class; + + output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g'), E'\\.+', '', 'g')); + END IF; + + output_row.field = +idx.id; + output_row.source = rid; + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = +FALSE; + END IF; + + END LOOP; + +END; + +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION biblio.extract_metabib_field_entry(bigint, text) + +OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION biblio.extract_metabib_field_entry(bigint, text) TO public; +GRANT EXECUTE ON FUNCTION +biblio.extract_metabib_field_entry(bigint, text) TO evergreen; +GRANT EXECUTE ON FUNCTION biblio.extract_metabib_field_entry(bigint, text) TO bbonner; + + +-- Function: metabib.browse_call_number_authority_refs_pivot(text) + +-- DROP FUNCTION metabib.browse_call_number_authority_refs_pivot(text); + +CREATE OR REPLACE +FUNCTION metabib.browse_call_number_authority_refs_pivot(text) + RETURNS bigint AS +$BODY$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or +browse term + SELECT mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE mbe.sort_value >= $1 + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + 
LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_call_number_authority_refs_pivot(text) + OWNER TO evergreen; + + +-- Function: metabib.browse_call_number_bib_pivot(text) + +-- DROP FUNCTION metabib.browse_call_number_bib_pivot(text); + +CREATE OR REPLACE FUNCTION +metabib.browse_call_number_bib_pivot(text) + RETURNS bigint AS +$BODY$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or browse term + SELECT +mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE mbe.sort_value >= $1 + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql +STABLE + COST 100; +ALTER FUNCTION metabib.browse_call_number_bib_pivot(text) + OWNER TO evergreen; + +-- function: public.naco_normalize_keep_decimal(text, text); +-- DROP FUNCTION public.naco_normalize_keep_decimal(text, text); diff --git a/KCLS/sql/kmain-1271/kmain-1271-case-sensitive-browse-call-number-searches.sql b/KCLS/sql/kmain-1271/kmain-1271-case-sensitive-browse-call-number-searches.sql new file mode 100644 index 0000000000..1543f5c576 --- /dev/null +++ b/KCLS/sql/kmain-1271/kmain-1271-case-sensitive-browse-call-number-searches.sql @@ -0,0 +1,368 @@ + +-- Function: biblio.extract_metabib_field_entry(bigint, text) + +-- DROP FUNCTION biblio.extract_metabib_field_entry(bigint, text); + +CREATE OR REPLACE FUNCTION +biblio.extract_metabib_field_entry(rid bigint, default_joiner text) + RETURNS SETOF metabib.field_entry_template AS +$BODY$ +DECLARE + bib biblio.record_entry +%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node +TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT +:= default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template +%ROWTYPE; +BEGIN + --ver2.0 - KMAIN-1162 + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + +output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR +idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from +config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name +THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := +perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM +unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data ise returned from oils_xpath (above) in UTF- +8, not entity encoded + REGEXP_REPLACE( -- This escapes embeded [^<]+)(<)([^>]+<)$re$, + E'\\1<\\3', + 'g' + ), + + '&(?!amp;)', + '&', + 'g' + ), + E'\\s+', + + ' ', + 'g' + ) + ), ' '), ''), + joiner + ); + + CONTINUE WHEN +curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + +raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF +idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY +[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF +idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = +idx.id; + output_row.source = rid; + -- outer regexp_replace keeps all '.' expect the last one. + -- inner +regexp_replace removes all connecting whitespace and replaces it with a single space + output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE +(browse_text, E'\\s+', ' ', 'g'), E'\\.$', '', 'g')); + output_row.sort_value := + public.naco_normalize(sort_value); + + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := +oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, +xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + +IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry +WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END +IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + -- Returning browse rows with search_field += true for search+browse + -- configs allows us to retain granularity of being able to search + -- browse fields with "starts with" +type operators (for example, for + -- titles of songs in music albums) + IF idx.search_field THEN + +output_row.search_field = TRUE; + END IF; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + +output_row.search_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF +idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( +idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + + output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g'), E'\\.+', '', 'g')); + + output_row.facet_field = +TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text +IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + + IF idx.field_class = 'identifier' AND +idx.name = 'bibcn' THEN + output_row.field_class = 'call_number'; + output_row.browse_field = TRUE; + + output_row.sort_value = public.naco_normalize_keep_decimal(raw_text,''); + output_row.value = raw_text; + ELSE + +output_row.field_class = 
idx.field_class; + output_row.value = BTRIM(REGEXP_REPLACE(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g'), E'\\.+', '', +'g')); + END IF; + + output_row.field = idx.id; + output_row.source = rid; + + output_row.search_field = TRUE; + +RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER +FUNCTION biblio.extract_metabib_field_entry(bigint, text) + OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION biblio.extract_metabib_field_entry(bigint, text) TO +public; +GRANT EXECUTE ON FUNCTION biblio.extract_metabib_field_entry(bigint, text) TO evergreen; +GRANT EXECUTE ON FUNCTION biblio.extract_metabib_field_entry +(bigint, text) TO bbonner; + + +-- Function: metabib.browse_call_number_authority_refs_pivot(text) + +-- DROP FUNCTION metabib.browse_call_number_authority_refs_pivot(text); + +CREATE OR REPLACE +FUNCTION metabib.browse_call_number_authority_refs_pivot(text) + RETURNS bigint AS +$BODY$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or +browse term + SELECT mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE mbe.sort_value >= public.naco_normalize_keep_decimal($1, '') + +ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_call_number_authority_refs_pivot(text) + OWNER TO +evergreen; + + +-- Function: metabib.browse_call_number_bib_pivot(text) + +-- DROP FUNCTION metabib.browse_call_number_bib_pivot(text); + +CREATE OR REPLACE FUNCTION +metabib.browse_call_number_bib_pivot(text) + RETURNS bigint AS +$BODY$ + --ver2 - KMAIN-1168 - removed public.naco_normalize around $1 or browse term + SELECT +mbe.id + FROM metabib.browse_call_number_entry mbe + WHERE mbe.sort_value >= public.naco_normalize_keep_decimal($1, '') + + ORDER BY mbe.sort_value, +mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_call_number_bib_pivot(text) + OWNER TO evergreen; + +-- Function: public.naco_normalize_keep_decimal(text, text) + +-- DROP FUNCTION public.naco_normalize_keep_decimal(text, text); + +CREATE OR REPLACE FUNCTION public.naco_normalize_keep_decimal(text, text) + RETURNS text AS +$BODY$ use strict; + use Unicode::Normalize; + use Encode; + + my $str = decode_utf8(shift); + my $sf = shift; + + # Altered version of the naco_normalize function + + $str = uc $str; + + # remove non-filing strings + $str =~ s/\x{0098}.*?\x{009C}//g; + + $str = NFKD($str); + + # additional substitutions - 3.6. + $str =~ s/\x{00C6}/AE/g; + $str =~ s/\x{00DE}/TH/g; + $str =~ s/\x{0152}/OE/g; + $str =~ tr/\x{0110}\x{00D0}\x{00D8}\x{0141}\x{2113}\x{02BB}\x{02BC}]['/DDOLl/d; + # transformations based on Unicode category codes + $str =~ s/[\p{Cc}\p{Cf}\p{Co}\p{Cs}\p{Lm}\p{Mc}\p{Me}\p{Mn}]//g; + + if ($sf) {$str =~ s/,/\x{009F}/;} + + $str =~ s/\'//g; + $str =~ s/\,//g; + $str =~ s/\'//g; + # since we`ve stripped out the control characters, we can now + # use a few as placeholders temporarily + $str =~ tr/+&@\x{266D}\x{266F}#/\x01\x02\x03\x04\x05\x06/; + + $str =~ tr/\x01\x02\x03\x04\x05\x06\x07/+&@\x{266D}\x{266F}#,/; + + # intentionally skipping step 8 of the NACO algorithm; if the string + # gets normalized away, that`s fine. 
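As an aside, not part of the patch itself: the kmain-1271 forward script changes both call-number pivot functions to normalize the incoming browse term with public.naco_normalize_keep_decimal (defined in this same script), where the rollback version compares the raw term against mbe.sort_value. A sketch of the intended call, with a made-up call number:

    -- Made-up browse term; returns the id of the first
    -- metabib.browse_call_number_entry at or after the pivot point.
    SELECT metabib.browse_call_number_bib_pivot('813.54 SMITH');
    -- which internally compares
    --   mbe.sort_value >= public.naco_normalize_keep_decimal('813.54 SMITH', '')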
+ + # leading and trailing spaces + $str =~ s/\s+/ /g; + $str =~ s/^\s+//; + $str =~ s/\s+$//g; + + $str =~ s/\x{009F}/,/; #Put comma back + + return lc $str; +$BODY$ + LANGUAGE plperlu IMMUTABLE STRICT + COST 100; +ALTER FUNCTION public.naco_normalize_keep_decimal(text, text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1312/create_metabib_bib_export_data.sql b/KCLS/sql/kmain-1312/create_metabib_bib_export_data.sql new file mode 100644 index 0000000000..daec3e2e94 --- /dev/null +++ b/KCLS/sql/kmain-1312/create_metabib_bib_export_data.sql @@ -0,0 +1,16 @@ +CREATE TABLE metabib.bib_export_data +( + id bigserial NOT NULL, + bib bigint NOT NULL, + export_date timestamp with time zone, + CONSTRAINT bib_export_data_pkey PRIMARY KEY (id), + CONSTRAINT bib_export_data_bib_fkey FOREIGN KEY (bib) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.bib_export_data + OWNER TO evergreen; +GRANT ALL ON TABLE metabib.bib_export_data TO evergreen; diff --git a/KCLS/sql/kmain-1312/create_metabib_bib_export_data_rollback.sql b/KCLS/sql/kmain-1312/create_metabib_bib_export_data_rollback.sql new file mode 100644 index 0000000000..c0de54f8f9 --- /dev/null +++ b/KCLS/sql/kmain-1312/create_metabib_bib_export_data_rollback.sql @@ -0,0 +1 @@ +DROP TABLE metabib.bib_export_data; diff --git a/KCLS/sql/kmain-1312/metabib_set_export_date.sql b/KCLS/sql/kmain-1312/metabib_set_export_date.sql new file mode 100644 index 0000000000..9af567fe21 --- /dev/null +++ b/KCLS/sql/kmain-1312/metabib_set_export_date.sql @@ -0,0 +1,18 @@ +CREATE OR REPLACE FUNCTION metabib.set_export_date(bib_id bigint, input_date date) + RETURNS void AS +$BODY$ +BEGIN + PERFORM * FROM metabib.bib_export_data WHERE bib = bib_id; + IF FOUND THEN + UPDATE metabib.bib_export_data SET export_date = input_date + WHERE bib = bib_id; + ELSE + INSERT INTO metabib.bib_export_data (bib, export_date) + VALUES (bib_id, input_date); + END IF; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.set_export_date(bigint, date) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1312/metabib_set_export_date_rollback.sql b/KCLS/sql/kmain-1312/metabib_set_export_date_rollback.sql new file mode 100644 index 0000000000..b90197b830 --- /dev/null +++ b/KCLS/sql/kmain-1312/metabib_set_export_date_rollback.sql @@ -0,0 +1 @@ +DROP FUNCTION metabib.set_export_date(bigint, date); diff --git a/KCLS/sql/kmain-1314/kmain-1314-rollback.sql b/KCLS/sql/kmain-1314/kmain-1314-rollback.sql new file mode 100644 index 0000000000..8467f4fe1c --- /dev/null +++ b/KCLS/sql/kmain-1314/kmain-1314-rollback.sql @@ -0,0 +1,298 @@ +-- Function: metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + +-- DROP FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text); + +CREATE OR REPLACE FUNCTION metabib.staged_browse(query text, fields integer[], context_org integer, context_locations integer[], staff boolean, browse_superpage_size integer, count_up_from_zero boolean, result_limit integer, next_pivot_pos integer, search_class text) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end 
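As an aside, not part of the patch itself: metabib.set_export_date() from the kmain-1312 scripts above is an upsert helper over the new metabib.bib_export_data table. A usage sketch, with a placeholder bib id and date:

    -- Placeholder bib id and export date.
    SELECT metabib.set_export_date(42, DATE '2015-02-09');
    -- A first call inserts a row for the bib; repeating the call for the
    -- same bib updates that row's export_date in place.
    SELECT bib, export_date FROM metabib.bib_export_data WHERE bib = 42;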
INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + --ver1.1 updated with kmain-806 - added support for the new metabib.browse_____entry_simple_heading_map tables. + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + CASE search_class + WHEN 'author' THEN + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_author_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_author_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'title' THEN + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_title_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_title_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'subject' THEN + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_subject_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = 
abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_subject_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'series' THEN + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_series_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_series_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'call_number' THEN + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_call_number_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + ELSE + + END CASE; + + + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. 
+ result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. + + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. 
+ row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-1314/kmain-1314.sql b/KCLS/sql/kmain-1314/kmain-1314.sql new file mode 100644 index 0000000000..8f40116217 --- /dev/null +++ b/KCLS/sql/kmain-1314/kmain-1314.sql @@ -0,0 +1,363 @@ +-- Function: metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + +-- DROP FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text); + +CREATE OR REPLACE FUNCTION metabib.staged_browse(query text, fields integer[], context_org integer, context_locations integer[], staff boolean, browse_superpage_size integer, count_up_from_zero boolean, result_limit integer, next_pivot_pos integer, search_class text) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; + unauthorized_entry RECORD; + +BEGIN + --ver1.1 updated with kmain-806 - added support for the new metabib.browse_____entry_simple_heading_map tables. + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + CASE search_class + WHEN 'author' THEN + --Is unauthorized, i.e., 4xx on an auth record? 
+ SELECT INTO unauthorized_entry * + FROM metabib.browse_author_entry_simple_heading_map mbeshm + INNER JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + INNER JOIN authority.control_set_authority_field acsaf ON ( acsaf.id = ash.atag AND acsaf.tag like '4__') + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + IF (unauthorized_entry.record IS NOT NULL) THEN + --Do unauthorized procedure, use the authorized term's auth record and it's bibs + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), + STRING_AGG(DISTINCT abl.authority::TEXT, $$,$$), + ARRAY_AGG(DISTINCT map.metabib_field) + FROM authority.bib_linking abl + INNER JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + map.authority_field = unauthorized_entry.atag + AND map.metabib_field = ANY(fields) + ) + WHERE abl.authority = unauthorized_entry.record; + ELSE + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_author_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_author_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'title' THEN + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_title_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_title_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'subject' THEN + --Is unauthorized, i.e., 4xx on an auth record? 
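As an aside, not part of the patch itself: the kmain-1314 version of metabib.staged_browse() first checks whether the current browse entry maps to an unauthorized heading, that is, a 4xx see-from field on an authority record, and if so gathers bibs through that authority's own bib links instead of going through authority.authority_linking. The same test can be run by hand for a given browse entry; 123 below is a placeholder entry id:

    -- Does author browse entry 123 come from a 4xx see-from heading?
    SELECT ash.record, ash.atag
      FROM metabib.browse_author_entry_simple_heading_map mbeshm
      JOIN authority.simple_heading ash ON (mbeshm.simple_heading = ash.id)
      JOIN authority.control_set_authority_field acsaf
        ON (acsaf.id = ash.atag AND acsaf.tag LIKE '4__')
     WHERE mbeshm.entry = 123;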
+ SELECT INTO unauthorized_entry * + FROM metabib.browse_subject_entry_simple_heading_map mbeshm + INNER JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + INNER JOIN authority.control_set_authority_field acsaf ON ( acsaf.id = ash.atag AND acsaf.tag like '4__') + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + IF (unauthorized_entry.record IS NOT NULL) THEN + --Do unauthorized procedure, use the authorized term's auth record and it's bibs + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), + STRING_AGG(DISTINCT abl.authority::TEXT, $$,$$), + ARRAY_AGG(DISTINCT map.metabib_field) + FROM authority.bib_linking abl + INNER JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + map.authority_field = unauthorized_entry.atag + AND map.metabib_field = ANY(fields) + ) + WHERE abl.authority = unauthorized_entry.record; + ELSE + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_subject_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_subject_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'series' THEN + --Is unauthorized, i.e., 4xx on an auth record? 
+ SELECT INTO unauthorized_entry * + FROM metabib.browse_series_entry_simple_heading_map mbeshm + INNER JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + INNER JOIN authority.control_set_authority_field acsaf ON ( acsaf.id = ash.atag AND acsaf.tag like '4__') + WHERE mbeshm.entry = rec.id; + + -- Gather aggregate data based on the MBE row we're looking at now, authority axis + IF (unauthorized_entry.record IS NOT NULL) THEN + --Do unauthorized procedure, use the authorized term's auth record and it's bibs + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), + STRING_AGG(DISTINCT abl.authority::TEXT, $$,$$), + ARRAY_AGG(DISTINCT map.metabib_field) + FROM authority.bib_linking abl + INNER JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + map.authority_field = unauthorized_entry.atag + AND map.metabib_field = ANY(fields) + ) + WHERE abl.authority = unauthorized_entry.record; + ELSE + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + FROM metabib.browse_series_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_series_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + WHEN 'call_number' THEN + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_call_number_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + ELSE + + END CASE; + + + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. 
+ + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. + + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. 
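As an aside, not part of the patch itself: for visibility testing, both versions of the function page through the linked records one browse superpage at a time and hand each slice to search.query_parser_fts() until a visible record turns up or the slices run out. The query string built for each slice is just a constant-relevance wrapper around the record ids, roughly:

    -- Shape of the per-superpage probe query the function builds,
    -- shown with three placeholder record ids.
    SELECT NULL::BIGINT AS id, ARRAY[r] AS records,
           1::INT AS rel
      FROM (SELECT UNNEST('{101,102,103}'::BIGINT[]) AS r) rr;

One detail worth flagging: in the kmain-1314 version the authority-axis accuracy assignment reads "accurate := ..." where the rollback uses "result_row.aaccurate := ...", and accurate is not declared in the function's DECLARE block, so that looks like an oversight.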
+ row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.staged_browse(text, integer[], integer, integer[], boolean, integer, boolean, integer, integer, text) + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-1324/metabib-bib-export-data-rollback.sql b/KCLS/sql/kmain-1324/metabib-bib-export-data-rollback.sql new file mode 100644 index 0000000000..daec3e2e94 --- /dev/null +++ b/KCLS/sql/kmain-1324/metabib-bib-export-data-rollback.sql @@ -0,0 +1,16 @@ +CREATE TABLE metabib.bib_export_data +( + id bigserial NOT NULL, + bib bigint NOT NULL, + export_date timestamp with time zone, + CONSTRAINT bib_export_data_pkey PRIMARY KEY (id), + CONSTRAINT bib_export_data_bib_fkey FOREIGN KEY (bib) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.bib_export_data + OWNER TO evergreen; +GRANT ALL ON TABLE metabib.bib_export_data TO evergreen; diff --git a/KCLS/sql/kmain-1324/metabib-bib-export-data.sql b/KCLS/sql/kmain-1324/metabib-bib-export-data.sql new file mode 100644 index 0000000000..307f680c24 --- /dev/null +++ b/KCLS/sql/kmain-1324/metabib-bib-export-data.sql @@ -0,0 +1,17 @@ +CREATE TABLE metabib.bib_export_data +( + id bigserial NOT NULL, + bib bigint NOT NULL, + export_date timestamp with time zone, + import_date timestamp with time zone, + CONSTRAINT bib_export_data_pkey PRIMARY KEY (id), + CONSTRAINT bib_export_data_bib_fkey FOREIGN KEY (bib) + REFERENCES biblio.record_entry (id) MATCH SIMPLE + ON UPDATE NO ACTION ON DELETE NO ACTION +) +WITH ( + OIDS=FALSE +); +ALTER TABLE metabib.bib_export_data + OWNER TO evergreen; +GRANT ALL ON TABLE metabib.bib_export_data TO evergreen; diff --git a/KCLS/sql/kmain-1324/metabib-set-import-date-rollback.sql b/KCLS/sql/kmain-1324/metabib-set-import-date-rollback.sql new file mode 100644 index 0000000000..cb69def98d --- /dev/null +++ b/KCLS/sql/kmain-1324/metabib-set-import-date-rollback.sql @@ -0,0 +1 @@ +DROP FUNCTION metabib.set_import_date(bigint, date); diff --git a/KCLS/sql/kmain-1324/metabib-set-import-date.sql b/KCLS/sql/kmain-1324/metabib-set-import-date.sql new file mode 100644 index 0000000000..95d2c69bcd --- /dev/null +++ b/KCLS/sql/kmain-1324/metabib-set-import-date.sql @@ -0,0 +1,18 @@ +CREATE OR REPLACE FUNCTION metabib.set_import_date(bib_id bigint, input_date date) + RETURNS void AS +$BODY$ +BEGIN + PERFORM * FROM metabib.bib_export_data WHERE bib = bib_id; + IF FOUND THEN + UPDATE metabib.bib_export_data SET import_date = input_date + WHERE bib = bib_id; + ELSE + INSERT INTO metabib.bib_export_data (bib, import_date) + VALUES (bib_id, input_date); + END IF; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.set_import_date(bigint, date) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1521/authority_heading_changed.sql b/KCLS/sql/kmain-1521/authority_heading_changed.sql new file mode 100644 index 0000000000..38be011c1e --- /dev/null +++ b/KCLS/sql/kmain-1521/authority_heading_changed.sql @@ -0,0 +1,27 @@ +-- Function: authority.heading_changed(text, text) + +-- DROP FUNCTION authority.heading_changed(text, text); + +CREATE OR REPLACE FUNCTION authority.heading_changed(old_marc text, new_marc text) + RETURNS boolean AS +$BODY$ + +DECLARE + old_heading text; + new_heading text; +BEGIN + -- Grab the 1XX field of each authority marc. 
+ old_heading := substring(old_marc from ''); + new_heading := substring(new_marc from ''); + + IF old_heading = new_heading THEN + RETURN FALSE; + ELSE + RETURN TRUE; + END IF; + + END $BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION authority.heading_changed(text, text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1521/authority_heading_changed_rollback.sql b/KCLS/sql/kmain-1521/authority_heading_changed_rollback.sql new file mode 100644 index 0000000000..6323516803 --- /dev/null +++ b/KCLS/sql/kmain-1521/authority_heading_changed_rollback.sql @@ -0,0 +1,5 @@ +-- Function: authority.heading_changed(text, text) + +DROP FUNCTION authority.heading_changed(text, text); + + diff --git a/KCLS/sql/kmain-1521/authority_indexing_ingest_or_delete.sql b/KCLS/sql/kmain-1521/authority_indexing_ingest_or_delete.sql new file mode 100644 index 0000000000..a4566d199c --- /dev/null +++ b/KCLS/sql/kmain-1521/authority_indexing_ingest_or_delete.sql @@ -0,0 +1,179 @@ +-- Function: authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + field_id integer; + are_row authority.record_entry%ROWTYPE; + bre_row biblio.record_entry%ROWTYPE; +BEGIN + --ver 2.1 KMAIN-1119 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + -- Remove the actual linking subfields present in + -- marc bib records that is controlled by this one + FOR bre_row IN SELECT * FROM biblio.record_entry bre + INNER JOIN authority.bib_linking abl + ON bre.id = abl.bib AND abl.authority = NEW.id LOOP + + UPDATE biblio.record_entry + SET marc = (SELECT regexp_replace(bre_row.marc,E']*?code="0">\\([A-Z]+\\)' || NEW.id || '','','g')) + WHERE id = bre_row.id; + + END LOOP; + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Remove the actual linking subfields present in + -- authority records that target this one + FOR are_row IN SELECT * FROM authority.record_entry auth + INNER JOIN authority.authority_linking aal + ON auth.id = aal.source AND aal.target = NEW.id LOOP + + UPDATE authority.record_entry + SET marc = (SELECT regexp_replace(are_row.marc,E']*?code="0">\\([A-Z]+\\)' || NEW.id || '','','g')) + WHERE id = are_row.id; + + END LOOP; + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Did the authority heading change? 
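As an aside, not part of the patch itself: the kmain-1521 version of authority.indexing_ingest_or_delete() only propagates heading updates to linked bib records when authority.heading_changed() reports that the 1XX heading actually changed; the rollback version further below propagates on every MARC change. Inside the trigger the gate is simply:

    -- OLD and NEW are the trigger's row variables for authority.record_entry.
    IF authority.heading_changed(OLD.marc, NEW.marc) THEN
        PERFORM authority.propagate_changes(NEW.id)
          FROM authority.record_entry WHERE id = NEW.id;
    END IF;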
+ IF authority.heading_changed(OLD.marc, NEW.marc) THEN + -- If so propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + END IF; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag 
WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1521/authority_indexing_ingest_or_delete_rollback.sql b/KCLS/sql/kmain-1521/authority_indexing_ingest_or_delete_rollback.sql new file mode 100644 index 0000000000..915844dcfc --- /dev/null +++ b/KCLS/sql/kmain-1521/authority_indexing_ingest_or_delete_rollback.sql @@ -0,0 +1,176 @@ +-- Function: authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + field_id integer; + are_row authority.record_entry%ROWTYPE; + bre_row biblio.record_entry%ROWTYPE; +BEGIN + --ver 2.1 KMAIN-1119 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + -- Remove the actual linking subfields present in + -- marc bib records that is controlled by this one + FOR bre_row IN SELECT * FROM biblio.record_entry bre + INNER JOIN authority.bib_linking abl + ON bre.id = abl.bib AND abl.authority = NEW.id LOOP + + UPDATE biblio.record_entry + SET marc = (SELECT regexp_replace(bre_row.marc,E']*?code="0">\\([A-Z]+\\)' || NEW.id || '','','g')) + WHERE id = bre_row.id; + + END LOOP; + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Remove the actual linking subfields present in + -- authority records that target this one + FOR are_row IN SELECT * FROM authority.record_entry auth + INNER JOIN authority.authority_linking aal + ON auth.id = aal.source AND aal.target = NEW.id LOOP + + UPDATE authority.record_entry + SET marc = (SELECT regexp_replace(are_row.marc,E']*?code="0">\\([A-Z]+\\)' || NEW.id || '','','g')) + WHERE id = are_row.id; + + END LOOP; + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashs.atag; + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ashs.sort_value + ORDER BY id; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashs.value, ashs.sort_value, substr(ashs.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly 
to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1528/kmain-1528-rollback.sql b/KCLS/sql/kmain-1528/kmain-1528-rollback.sql new file mode 100644 index 0000000000..199c6b9a87 --- /dev/null +++ b/KCLS/sql/kmain-1528/kmain-1528-rollback.sql @@ -0,0 +1,31 @@ +-- Function: public.export_ids_001(bigint) + +-- DROP FUNCTION public.export_ids_001(bigint); + +CREATE OR REPLACE FUNCTION public.export_ids_001(record_to_check bigint) + RETURNS boolean AS +$BODY$ + +-- Returns true if the record has at least one 001 field and at least one of those fields should contain “oc” +-- OR has at least one 035 field with "WaOLN" in subfield "a" +DECLARE + v_row_count BIGINT; +BEGIN + + SELECT count(*) INTO v_row_count + FROM metabib.real_full_rec + WHERE ((tag = '001' AND value ILIKE '%oc%') + OR (tag = '035' AND subfield ILIKE 'a' AND value ILIKE '%WaOLN%')) + AND record = record_to_check; + + IF v_row_count > 0 THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + +END $BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION public.export_ids_001(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-1528/kmain-1528.sql b/KCLS/sql/kmain-1528/kmain-1528.sql new file mode 100644 index 0000000000..ed0b417221 --- /dev/null +++ b/KCLS/sql/kmain-1528/kmain-1528.sql @@ -0,0 +1,31 @@ +-- Function: public.export_ids_001(bigint) + +-- DROP FUNCTION public.export_ids_001(bigint); + +CREATE OR REPLACE FUNCTION public.export_ids_001(record_to_check bigint) + RETURNS boolean AS +$BODY$ + +-- Returns true if the record has at least one 001 field and at least one of those fields should contain “oc” +-- OR has at least one 035 field with "WaOLN" in subfield "a" +DECLARE + v_row_count BIGINT; +BEGIN + + SELECT count(*) INTO v_row_count + FROM metabib.real_full_rec + WHERE ((tag = '001' AND (value ILIKE 'oc%' OR value ILIKE 'on%' OR value ILIKE 'wln%')) + OR (tag = '035' AND subfield ILIKE 'a' AND value ILIKE '%WaOLN%')) + AND record = record_to_check; + + IF v_row_count > 0 THEN + RETURN TRUE; + ELSE + RETURN FALSE; + END IF; + +END $BODY$ + LANGUAGE plpgsql STABLE + COST 100; +ALTER FUNCTION public.export_ids_001(bigint) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-223/kmain-223-auth-match.sql b/KCLS/sql/kmain-223/kmain-223-auth-match.sql new file mode 100644 index 0000000000..6ae6d66925 --- /dev/null +++ b/KCLS/sql/kmain-223/kmain-223-auth-match.sql @@ -0,0 +1,488 @@ +BEGIN; + +ALTER TABLE vandelay.authority_match +ADD match_score integer NOT NULL DEFAULT 0; + +DROP TYPE IF EXISTS matched_records CASCADE; +CREATE TYPE matched_records AS ( + create_date timestamp with time zone, + creator integer, + edit_date timestamp with time zone, + id bigint, + quality integer, + source integer, + tcn_source text, + tcn_value text, + _id text, + match_score integer, + match_quality integer +); + +CREATE OR REPLACE FUNCTION vandelay.get_matched_records(queued_bib bigint, bib_type text) + RETURNS SETOF matched_records AS 
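+-- Returns one row per stored match for a queued Vandelay record: bib_type = 'bib'
+-- walks vandelay.bib_match, anything else walks vandelay.authority_match; each row
+-- carries the matched Evergreen record's id plus the recorded match_score and quality.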
+$BODY$ +DECLARE + m_record matched_records; +BEGIN + + IF bib_type = 'bib' THEN + FOR m_record IN + SELECT create_date, creator, edit_date, bre.id, vqbr.quality, source, tcn_source, tcn_value, + 'null' as _id, vbm.match_score, vbm.quality as match_quality + FROM biblio.record_entry bre + INNER JOIN vandelay.bib_match vbm + ON vbm.eg_record = bre.id + INNER JOIN vandelay.queued_bib_record vqbr + ON vqbr.id = vbm.queued_record + WHERE vqbr.id = queued_bib + ORDER BY bre.id DESC + LOOP + RETURN NEXT m_record; + END LOOP; + ELSE + FOR m_record IN + SELECT create_date, creator, edit_date, are.id, vqar.quality, source, '' as tcn_source, '' as tcn_value, + 'null' as _id, vam.match_score, vam.quality as match_quality + FROM authority.record_entry are + INNER JOIN vandelay.authority_match vam + ON vam.eg_record = are.id + INNER JOIN vandelay.queued_authority_record vqar + ON vqar.id = vam.queued_record + WHERE vqar.id = queued_bib + ORDER BY are.id DESC + LOOP + RETURN NEXT m_record; + END LOOP; + END IF; + RETURN; +END; +$BODY$ + LANGUAGE plpgsql IMMUTABLE + COST 100 + ROWS 1000; +ALTER FUNCTION vandelay.get_matched_records(bigint, text) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay.auto_overlay_authority_record_with_best(import_id bigint, merge_profile_id integer, lwm_ratio_value_p numeric) + RETURNS boolean AS +$BODY$ +DECLARE + eg_id BIGINT; + lwm_ratio_value NUMERIC; +BEGIN + + lwm_ratio_value := COALESCE(lwm_ratio_value_p, 0.0); + + PERFORM * FROM vandelay.queued_authority_record WHERE import_time IS NOT NULL AND id = import_id; + + IF FOUND THEN + RAISE NOTICE 'already imported, cannot auto-overlay'; + RETURN FALSE; + END IF; + + SELECT m.eg_record INTO eg_id + FROM vandelay.authority_match m + JOIN vandelay.queued_authority_record qr ON (m.queued_record = qr.id) + JOIN vandelay.authority_queue q ON (qr.queue = q.id) + JOIN authority.record_entry r ON (r.id = m.eg_record) + WHERE m.queued_record = import_id + AND qr.quality::NUMERIC / COALESCE(NULLIF(m.quality,0),1)::NUMERIC >= lwm_ratio_value + ORDER BY m.match_score DESC, -- required match score + qr.quality::NUMERIC / COALESCE(NULLIF(m.quality,0),1)::NUMERIC DESC, -- quality tie breaker + m.id -- when in doubt, use the first match + LIMIT 1; + + IF eg_id IS NULL THEN + RAISE NOTICE 'incoming record is not of high enough quality'; + RETURN FALSE; + END IF; + + RETURN vandelay.overlay_authority_record( import_id, eg_id, merge_profile_id ); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.auto_overlay_authority_record_with_best(bigint, integer, numeric) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION vandelay.match_auth_record() + RETURNS trigger AS +$BODY$ +DECLARE + incoming_existing_id TEXT; + test_result vandelay.match_set_test_result%ROWTYPE; + tmp_rec BIGINT; + match_set INT; +BEGIN + IF TG_OP IN ('INSERT','UPDATE') AND NEW.imported_as IS NOT NULL THEN + RETURN NEW; + END IF; + + DELETE FROM vandelay.authority_match WHERE queued_record = NEW.id; + + SELECT q.match_set INTO match_set FROM vandelay.authority_queue q WHERE q.id = NEW.queue; + + IF match_set IS NOT NULL THEN + NEW.quality := vandelay.measure_record_quality( NEW.marc, match_set ); + END IF; + + -- Perfect matches on 901$c exit early with a match with high quality. 
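+    -- The 901$c value is treated as an existing Evergreen record id; when such a
+    -- record exists, a match row is inserted with an artificially high match_score
+    -- (9999) so it outranks heuristic matches in auto_overlay_authority_record_with_best().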
+ incoming_existing_id := + oils_xpath_string('//*[@tag="901"]/*[@code="c"][1]', NEW.marc); + + IF incoming_existing_id IS NOT NULL AND incoming_existing_id != '' THEN + SELECT id INTO tmp_rec FROM authority.record_entry WHERE id = incoming_existing_id::bigint; + IF tmp_rec IS NOT NULL THEN + INSERT INTO vandelay.authority_match (queued_record, eg_record, match_score, quality) + SELECT + NEW.id, + b.id, + 9999, + vandelay.measure_record_quality( b.marc, match_set ) + FROM authority.record_entry b + WHERE id = incoming_existing_id::bigint; + END IF; + END IF; + + IF match_set IS NULL THEN + RETURN NEW; + END IF; + + FOR test_result IN SELECT * FROM + vandelay.match_set_test_marcxml_auth(match_set, NEW.marc) LOOP + + INSERT INTO vandelay.authority_match ( queued_record, eg_record, match_score, quality ) + SELECT + NEW.id, + test_result.record, + test_result.quality, + vandelay.measure_record_quality( b.marc, match_set ) + FROM authority.record_entry b + WHERE id = test_result.record; + + END LOOP; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.match_auth_record() + OWNER TO evergreen; + +CREATE TRIGGER zz_match_auth_trigger + BEFORE INSERT OR UPDATE + ON vandelay.queued_authority_record + FOR EACH ROW + EXECUTE PROCEDURE vandelay.match_auth_record(); + + +CREATE OR REPLACE FUNCTION vandelay.match_set_test_marcxml_auth(match_set_id integer, record_xml text) + RETURNS SETOF vandelay.match_set_test_result AS +$BODY$ +DECLARE + tags_rstore HSTORE; + svf_rstore HSTORE; + coal TEXT; + joins TEXT; + query_ TEXT; + wq TEXT; + qvalue INTEGER; + rec RECORD; +BEGIN + tags_rstore := vandelay.flatten_marc_hstore(record_xml); + svf_rstore := vandelay.extract_rec_attrs(record_xml); + + CREATE TEMPORARY TABLE _vandelay_tmp_qrows_auth (q INTEGER); + CREATE TEMPORARY TABLE _vandelay_tmp_jrows_auth (j TEXT); + + -- generate the where clause and return that directly (into wq), and as + -- a side-effect, populate the _vandelay_tmp_[qj]rows tables. + wq := vandelay.get_expr_from_match_set_auth(match_set_id, tags_rstore); + + query_ := 'SELECT DISTINCT(record), '; + + -- qrows table is for the quality bits we add to the SELECT clause + SELECT ARRAY_TO_STRING( + ARRAY_ACCUM('COALESCE(n' || q::TEXT || '.quality, 0)'), ' + ' + ) INTO coal FROM _vandelay_tmp_qrows_auth; + + -- our query string so far is the SELECT clause and the inital FROM. + -- no JOINs yet nor the WHERE clause + query_ := query_ || coal || ' AS quality ' || E'\n'; + + -- jrows table is for the joins we must make (and the real text conditions) + SELECT ARRAY_TO_STRING(ARRAY_ACCUM(j), E'\n') INTO joins + FROM _vandelay_tmp_jrows_auth; + + -- add those joins and the where clause to our query. 
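+    -- The finished statement is run below via EXECUTE ... USING tags_rstore, svf_rstore,
+    -- so the join fragments built by _get_expr_push_jrow_auth() can reference them as $1 and $2.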
+ query_ := query_ || joins || E'\n' || 'JOIN authority.record_entry bre ON (bre.id = record) ' || 'WHERE ' || wq || ' AND not bre.deleted'; + + -- this will return rows of record,quality + FOR rec IN EXECUTE query_ USING tags_rstore, svf_rstore LOOP + RETURN NEXT rec; + END LOOP; + + DROP TABLE _vandelay_tmp_qrows_auth; + DROP TABLE _vandelay_tmp_jrows_auth; + RETURN; +END; + +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION vandelay.match_set_test_marcxml_auth(integer, text) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay.get_expr_from_match_set_point_auth(node vandelay.match_set_point, tags_rstore hstore) + RETURNS text AS +$BODY$ +DECLARE + q TEXT; + i INTEGER; + this_op TEXT; + children INTEGER[]; + child vandelay.match_set_point; +BEGIN + SELECT ARRAY_ACCUM(id) INTO children FROM vandelay.match_set_point + WHERE parent = node.id; + + IF ARRAY_LENGTH(children, 1) > 0 THEN + this_op := vandelay._get_expr_render_one(node); + q := '('; + i := 1; + WHILE children[i] IS NOT NULL LOOP + SELECT * INTO child FROM vandelay.match_set_point + WHERE id = children[i]; + IF i > 1 THEN + q := q || ' ' || this_op || ' '; + END IF; + i := i + 1; + q := q || vandelay.get_expr_from_match_set_point_auth(child, tags_rstore); + END LOOP; + q := q || ')'; + RETURN q; + ELSIF node.bool_op IS NULL THEN + PERFORM vandelay._get_expr_push_qrow_auth(node); + PERFORM vandelay._get_expr_push_jrow_auth(node, tags_rstore); + RETURN vandelay._get_expr_render_one(node); + ELSE + RETURN ''; + END IF; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.get_expr_from_match_set_point_auth(vandelay.match_set_point, hstore) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay.get_expr_from_match_set_auth(match_set_id integer, tags_rstore hstore) + RETURNS text AS +$BODY$ +DECLARE + root vandelay.match_set_point; +BEGIN + SELECT * INTO root FROM vandelay.match_set_point + WHERE parent IS NULL AND match_set = match_set_id; + + RETURN vandelay.get_expr_from_match_set_point_auth(root, tags_rstore); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.get_expr_from_match_set_auth(integer, hstore) + OWNER TO evergreen; + + +CREATE OR REPLACE FUNCTION vandelay._get_expr_push_qrow_auth(node vandelay.match_set_point) + RETURNS void AS +$BODY$ +DECLARE +BEGIN + INSERT INTO _vandelay_tmp_qrows_auth (q) VALUES (node.id); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay._get_expr_push_qrow_auth(vandelay.match_set_point) + OWNER TO evergreen; + +CREATE OR REPLACE FUNCTION vandelay._get_expr_push_jrow_auth(node vandelay.match_set_point, tags_rstore hstore) + RETURNS void AS +$BODY$ +DECLARE + jrow TEXT; + my_alias TEXT; + op TEXT; + tagkey TEXT; + caseless BOOL; + jrow_count INT; + my_using TEXT; + my_join TEXT; +BEGIN + -- remember $1 is tags_rstore, and $2 is svf_rstore + + caseless := FALSE; + SELECT COUNT(*) INTO jrow_count FROM _vandelay_tmp_jrows_auth; + IF jrow_count > 0 THEN + my_using := ' USING (record)'; + my_join := 'FULL OUTER JOIN'; + ELSE + my_using := ''; + my_join := 'FROM'; + END IF; + + IF node.tag IS NOT NULL THEN + caseless := (node.tag IN ('020', '022', '024')); + tagkey := node.tag; + IF node.subfield IS NOT NULL THEN + tagkey := tagkey || node.subfield; + END IF; + END IF; + + IF node.negate THEN + IF caseless THEN + op := 'NOT LIKE'; + ELSE + op := '<>'; + END IF; + ELSE + IF caseless THEN + op := 'LIKE'; + ELSE + op := '='; + END IF; + END IF; + + my_alias := 'n' || node.id::TEXT; + + 
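+    -- Build one join fragment per match point: tag/subfield points join authority.full_rec,
+    -- SVF points join metabib.record_attr; each fragment is aliased n<id> so its quality
+    -- column can be summed by the SELECT list assembled from _vandelay_tmp_qrows_auth.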
jrow := my_join || ' (SELECT *, '; + IF node.tag IS NOT NULL THEN + jrow := jrow || node.quality || + ' AS quality FROM authority.full_rec mfr WHERE mfr.tag = ''' || + node.tag || ''''; + IF node.subfield IS NOT NULL THEN + jrow := jrow || ' AND mfr.subfield = ''' || + node.subfield || ''''; + END IF; + jrow := jrow || ' AND ('; + jrow := jrow || vandelay._node_tag_comparisons(caseless, op, tags_rstore, tagkey); + jrow := jrow || ')) ' || my_alias || my_using || E'\n'; + ELSE -- svf + jrow := jrow || 'id AS record, ' || node.quality || + ' AS quality FROM metabib.record_attr mra WHERE mra.attrs->''' || + node.svf || ''' ' || op || ' $2->''' || node.svf || ''') ' || + my_alias || my_using || E'\n'; + END IF; + INSERT INTO _vandelay_tmp_jrows_auth (j) VALUES (jrow); +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay._get_expr_push_jrow_auth(vandelay.match_set_point, hstore) + OWNER TO evergreen; + +-- Function: vandelay.extract_rec_attrs(text, text[]) + +-- DROP FUNCTION vandelay.extract_rec_attrs(text, text[]); + +CREATE OR REPLACE FUNCTION vandelay.extract_rec_attrs(xml text, attr_defs text[]) + RETURNS hstore AS +$BODY$ +DECLARE + transformed_xml TEXT; + prev_xfrm TEXT; + normalizer RECORD; + xfrm config.xml_transform%ROWTYPE; + attr_value TEXT; + new_attrs HSTORE := ''::HSTORE; + attr_def config.record_attr_definition%ROWTYPE; +BEGIN + + FOR attr_def IN SELECT * FROM config.record_attr_definition WHERE name IN (SELECT * FROM UNNEST(attr_defs)) ORDER BY format LOOP + + IF attr_def.tag IS NOT NULL THEN -- tag (and optional subfield list) selection + SELECT ARRAY_TO_STRING(ARRAY_ACCUM(x.value), COALESCE(attr_def.joiner,' ')) INTO attr_value + FROM vandelay.flatten_marc(xml) AS x + WHERE x.tag LIKE attr_def.tag + AND CASE + WHEN attr_def.sf_list IS NOT NULL + THEN POSITION(x.subfield IN attr_def.sf_list) > 0 + ELSE TRUE + END + GROUP BY x.tag + ORDER BY x.tag + LIMIT 1; + + ELSIF attr_def.fixed_field IS NOT NULL THEN -- a named fixed field, see config.marc21_ff_pos_map.fixed_field + attr_value := vandelay.marc21_extract_fixed_field(xml, attr_def.fixed_field); + + ELSIF attr_def.xpath IS NOT NULL THEN -- and xpath expression + + SELECT INTO xfrm * FROM config.xml_transform WHERE name = attr_def.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(xml,xfrm.xslt); + ELSE + transformed_xml := xml; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + IF xfrm.name IS NULL THEN + -- just grab the marcxml (empty) transform + SELECT INTO xfrm * FROM config.xml_transform WHERE xslt = '---' LIMIT 1; + prev_xfrm := xfrm.name; + END IF; + + attr_value := oils_xpath_string(attr_def.xpath, transformed_xml, COALESCE(attr_def.joiner,' '), ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]]); + + ELSIF attr_def.phys_char_sf IS NOT NULL THEN -- a named Physical Characteristic, see config.marc21_physical_characteristic_*_map + SELECT m.value::TEXT INTO attr_value + FROM vandelay.marc21_physical_characteristics(xml) v + JOIN config.marc21_physical_characteristic_value_map m ON (m.id = v.value) + WHERE v.subfield = attr_def.phys_char_sf + LIMIT 1; -- Just in case ... 
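+        -- Note: the normalizer EXECUTE below wraps attr_value in quote_nullable()
+        -- (the rollback script restores the stock quote_literal()), so a NULL
+        -- attribute value no longer yields a NULL EXECUTE string.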
+ + END IF; + + -- apply index normalizers to attr_value + FOR normalizer IN + SELECT n.func AS func, + n.param_count AS param_count, + m.params AS params + FROM config.index_normalizer n + JOIN config.record_attr_index_norm_map m ON (m.norm = n.id) + WHERE attr = attr_def.name + ORDER BY m.pos LOOP + EXECUTE 'SELECT ' || normalizer.func || '(' || + quote_nullable( attr_value ) || + CASE + WHEN normalizer.param_count > 0 + THEN ',' || REPLACE(REPLACE(BTRIM(normalizer.params,'[]'),E'\'',E'\\\''),E'"',E'\'') + ELSE '' + END || + ')' INTO attr_value; + + END LOOP; + + -- Add the new value to the hstore + new_attrs := new_attrs || hstore( attr_def.name, attr_value ); + + END LOOP; + + RETURN new_attrs; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.extract_rec_attrs(text, text[]) + OWNER TO evergreen; + +COMMIT; diff --git a/KCLS/sql/kmain-223/kmain-223-rollback.sql b/KCLS/sql/kmain-223/kmain-223-rollback.sql new file mode 100644 index 0000000000..b04240b8c5 --- /dev/null +++ b/KCLS/sql/kmain-223/kmain-223-rollback.sql @@ -0,0 +1,114 @@ +--KMAIN-551 ROLLBACK +BEGIN; + +DROP FUNCTION vandelay.match_auth_record() CASCADE; +DROP FUNCTION vandelay.match_set_test_marcxml_auth(integer, text); +DROP FUNCTION vandelay.get_expr_from_match_set_point_auth(vandelay.match_set_point, hstore); +DROP FUNCTION vandelay.get_expr_from_match_set_auth(integer, hstore); +DROP FUNCTION vandelay._get_expr_push_qrow_auth(vandelay.match_set_point); +DROP FUNCTION vandelay._get_expr_push_jrow_auth(vandelay.match_set_point, hstore); +DROP FUNCTION vandelay.auto_overlay_authority_record_with_best(bigint, integer, numeric); +DROP TYPE IF EXISTS matched_records CASCADE; + +ALTER TABLE vandelay.authority_match +DROP COLUMN match_score; + +CREATE OR REPLACE FUNCTION vandelay.extract_rec_attrs(xml text, attr_defs text[]) + RETURNS hstore AS +$BODY$ +DECLARE + transformed_xml TEXT; + prev_xfrm TEXT; + normalizer RECORD; + xfrm config.xml_transform%ROWTYPE; + attr_value TEXT; + new_attrs HSTORE := ''::HSTORE; + attr_def config.record_attr_definition%ROWTYPE; +BEGIN + + FOR attr_def IN SELECT * FROM config.record_attr_definition WHERE name IN (SELECT * FROM UNNEST(attr_defs)) ORDER BY format LOOP + + IF attr_def.tag IS NOT NULL THEN -- tag (and optional subfield list) selection + SELECT ARRAY_TO_STRING(ARRAY_ACCUM(x.value), COALESCE(attr_def.joiner,' ')) INTO attr_value + FROM vandelay.flatten_marc(xml) AS x + WHERE x.tag LIKE attr_def.tag + AND CASE + WHEN attr_def.sf_list IS NOT NULL + THEN POSITION(x.subfield IN attr_def.sf_list) > 0 + ELSE TRUE + END + GROUP BY x.tag + ORDER BY x.tag + LIMIT 1; + + ELSIF attr_def.fixed_field IS NOT NULL THEN -- a named fixed field, see config.marc21_ff_pos_map.fixed_field + attr_value := vandelay.marc21_extract_fixed_field(xml, attr_def.fixed_field); + + ELSIF attr_def.xpath IS NOT NULL THEN -- and xpath expression + + SELECT INTO xfrm * FROM config.xml_transform WHERE name = attr_def.format; + + -- See if we can skip the XSLT ... 
it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(xml,xfrm.xslt); + ELSE + transformed_xml := xml; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + IF xfrm.name IS NULL THEN + -- just grab the marcxml (empty) transform + SELECT INTO xfrm * FROM config.xml_transform WHERE xslt = '---' LIMIT 1; + prev_xfrm := xfrm.name; + END IF; + + attr_value := oils_xpath_string(attr_def.xpath, transformed_xml, COALESCE(attr_def.joiner,' '), ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]]); + + ELSIF attr_def.phys_char_sf IS NOT NULL THEN -- a named Physical Characteristic, see config.marc21_physical_characteristic_*_map + SELECT m.value::TEXT INTO attr_value + FROM vandelay.marc21_physical_characteristics(xml) v + JOIN config.marc21_physical_characteristic_value_map m ON (m.id = v.value) + WHERE v.subfield = attr_def.phys_char_sf + LIMIT 1; -- Just in case ... + + END IF; + + -- apply index normalizers to attr_value + FOR normalizer IN + SELECT n.func AS func, + n.param_count AS param_count, + m.params AS params + FROM config.index_normalizer n + JOIN config.record_attr_index_norm_map m ON (m.norm = n.id) + WHERE attr = attr_def.name + ORDER BY m.pos LOOP + EXECUTE 'SELECT ' || normalizer.func || '(' || + quote_literal( attr_value ) || + CASE + WHEN normalizer.param_count > 0 + THEN ',' || REPLACE(REPLACE(BTRIM(normalizer.params,'[]'),E'\'',E'\\\''),E'"',E'\'') + ELSE '' + END || + ')' INTO attr_value; + + END LOOP; + + -- Add the new value to the hstore + new_attrs := new_attrs || hstore( attr_def.name, attr_value ); + + END LOOP; + + RETURN new_attrs; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION vandelay.extract_rec_attrs(text, text[]) + OWNER TO evergreen; + +COMMIT; + diff --git a/KCLS/sql/kmain-762/metabib-browse-author-authority-refs-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-author-authority-refs-pivot.sql new file mode 100644 index 0000000000..c90738fa47 --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-author-authority-refs-pivot.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_author_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with kmain-806 + SELECT mbae.id + FROM metabib.browse_author_entry mbae + JOIN metabib.browse_author_entry_simple_heading_map mbaeshm ON ( mbaeshm.entry = mbae.id ) + JOIN authority.simple_heading ash ON ( mbaeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbae.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbae.sort_value), public.replace_ampersand(mbae.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/metabib-browse-author-bib-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-author-bib-pivot.sql new file mode 100644 index 0000000000..4d63d67f0c --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-author-bib-pivot.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_author_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_author_bib_pivot(integer[], text); 
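+-- Returns the id of the first author browse entry at or after the normalized term
+-- that is linked to at least one bib through the requested metabib fields; ampersands
+-- are folded to 'and' on both sides of the comparison via public.replace_ampersand().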
+ +CREATE OR REPLACE FUNCTION metabib.browse_author_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_author_entry mbe + JOIN metabib.browse_author_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_bib_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/metabib-browse-series-authority-refs-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-series-authority-refs-pivot.sql new file mode 100644 index 0000000000..9e4ce65b2d --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-series-authority-refs-pivot.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_series_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with kmain-806 + SELECT mbse.id + FROM metabib.browse_series_entry mbse + JOIN metabib.browse_series_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbse.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbse.sort_value), public.replace_ampersand(mbse.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/metabib-browse-series-bib-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-series-bib-pivot.sql new file mode 100644 index 0000000000..0a59b0a996 --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-series-bib-pivot.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_series_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_series_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_series_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_series_entry mbe + JOIN metabib.browse_series_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_bib_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/metabib-browse-subject-authority-refs-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-subject-authority-refs-pivot.sql new file mode 100644 index 0000000000..13847d32b6 --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-subject-authority-refs-pivot.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_subject_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with 
kmain-806 + SELECT mbse.id + FROM metabib.browse_subject_entry mbse + JOIN metabib.browse_subject_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbse.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbse.sort_value), public.replace_ampersand(mbse.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/metabib-browse-subject-bib-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-subject-bib-pivot.sql new file mode 100644 index 0000000000..3190dbd615 --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-subject-bib-pivot.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_subject_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_subject_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_subject_entry mbe + JOIN metabib.browse_subject_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/metabib-browse-text.sql b/KCLS/sql/kmain-762/metabib-browse-text.sql new file mode 100644 index 0000000000..57ce8ba73a --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-text.sql @@ -0,0 +1,177 @@ +-- Function: metabib.browse(text, text, integer, integer, boolean, bigint, integer) + +-- DROP FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer); + +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + --ver1.1 updated with kmain-806 + -- Get search field int list with search_class + IF search_class = 'id|bibcn' THEN + + SELECT INTO search_class 'call_number'; + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; + + IF pivot_id IS NULL THEN + + pivot_id := metabib.browse_call_number_pivot(browse_term); + + END IF; + ELSE + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. 
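+    -- Each search class has a dedicated pivot helper that resolves the browse term
+    -- to the entry id the result window is centered on (call_number was resolved above).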
+ IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + + END CASE; + END IF; + END IF; + + CASE search_class + WHEN 'author' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = pivot_id; + WHEN 'title' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_title_entry WHERE id = pivot_id; + WHEN 'subject' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_subject_entry WHERE id = pivot_id; + WHEN 'series' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_series_entry WHERE id = pivot_id; + WHEN 'call_number' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = pivot_id; + + END CASE; + + --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + public.replace_ampersand(mbe.sort_value) + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + )'; + IF search_class != 'call_number' THEN + + core_query := core_query || ' OR EXISTS ( -- are there any authorities using this mbe via the requested fields? 
+ SELECT 1 + FROM metabib.browse_' || search_class || '_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + )'; + + END IF; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ') AND public.replace_ampersand(mbe.truncated_sort_value) <= ' || quote_literal(public.replace_ampersand(pivot_sort_value)) || --<< + ' ORDER BY public.replace_ampersand(mbe.truncated_sort_value) DESC, public.replace_ampersand(mbe.value) DESC '; + + -- This variant browses forward. + forward_query := core_query || + ') AND public.replace_ampersand(mbe.truncated_sort_value) > ' || quote_literal(public.replace_ampersand(pivot_sort_value)) || --<< + ' ORDER BY public.replace_ampersand(mbe.truncated_sort_value), public.replace_ampersand(mbe.value) '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION ALL + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-762/metabib-browse-title-authority-refs-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-title-authority-refs-pivot.sql new file mode 100644 index 0000000000..3aaf3f6493 --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-title-authority-refs-pivot.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_title_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with kmain-806 + SELECT mbte.id + FROM metabib.browse_title_entry mbte + JOIN metabib.browse_title_entry_simple_heading_map mbteshm ON ( mbteshm.entry = mbte.id ) + JOIN authority.simple_heading ash ON ( mbteshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE public.replace_ampersand(mbte.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbte.sort_value), public.replace_ampersand(mbte.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/metabib-browse-title-bib-pivot.sql b/KCLS/sql/kmain-762/metabib-browse-title-bib-pivot.sql new file mode 100644 index 0000000000..4fb2d68f2a --- /dev/null +++ b/KCLS/sql/kmain-762/metabib-browse-title-bib-pivot.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_title_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_title_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION 
metabib.browse_title_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_title_entry mbe + JOIN metabib.browse_title_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE public.replace_ampersand(mbe.sort_value) >= public.replace_ampersand(public.naco_normalize($2)) + ORDER BY public.replace_ampersand(mbe.sort_value), public.replace_ampersand(mbe.value) LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_bib_pivot(integer[], text) + OWNER TO evergreen; \ No newline at end of file diff --git a/KCLS/sql/kmain-762/public-replace-ampersand.sql b/KCLS/sql/kmain-762/public-replace-ampersand.sql new file mode 100644 index 0000000000..ad547f2628 --- /dev/null +++ b/KCLS/sql/kmain-762/public-replace-ampersand.sql @@ -0,0 +1,13 @@ +-- Function: public.replace_ampersand(text) + +-- DROP FUNCTION public.replace_ampersand(text); + +CREATE OR REPLACE FUNCTION public.replace_ampersand(text) + RETURNS text AS +$BODY$ + SELECT REGEXP_REPLACE( $1, '&|&', 'and', 'g' ); +$BODY$ + LANGUAGE sql IMMUTABLE STRICT + COST 100; +ALTER FUNCTION public.replace_ampersand(text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-author-authority-refs-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-author-authority-refs-pivot-rollback.sql new file mode 100644 index 0000000000..20e2e04537 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-author-authority-refs-pivot-rollback.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_author_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with kmain-806 + SELECT mbae.id + FROM metabib.browse_author_entry mbae + JOIN metabib.browse_author_entry_simple_heading_map mbaeshm ON ( mbaeshm.entry = mbae.id ) + JOIN authority.simple_heading ash ON ( mbaeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbae.sort_value >= public.naco_normalize($2) + ORDER BY mbae.sort_value, mbae.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_authority_refs_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-author-bib-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-author-bib-pivot-rollback.sql new file mode 100644 index 0000000000..7afc4c7cf3 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-author-bib-pivot-rollback.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_author_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_author_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_author_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_author_entry mbe + JOIN metabib.browse_author_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_author_bib_pivot(integer[], text) + OWNER TO evergreen; diff --git 
a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-series-authority-refs-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-series-authority-refs-pivot-rollback.sql new file mode 100644 index 0000000000..cc3b5f7cc9 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-series-authority-refs-pivot-rollback.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_series_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with kmain-806 + SELECT mbse.id + FROM metabib.browse_series_entry mbse + JOIN metabib.browse_series_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbse.sort_value >= public.naco_normalize($2) + ORDER BY mbse.sort_value, mbse.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_authority_refs_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-series-bib-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-series-bib-pivot-rollback.sql new file mode 100644 index 0000000000..1be966ad47 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-series-bib-pivot-rollback.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_series_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_series_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_series_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_series_entry mbe + JOIN metabib.browse_series_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_series_bib_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-subject-authority-refs-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-subject-authority-refs-pivot-rollback.sql new file mode 100644 index 0000000000..35c2a2bfe3 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-subject-authority-refs-pivot-rollback.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_subject_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with kmain-806 + SELECT mbse.id + FROM metabib.browse_subject_entry mbse + JOIN metabib.browse_subject_entry_simple_heading_map mbseshm ON ( mbseshm.entry = mbse.id ) + JOIN authority.simple_heading ash ON ( mbseshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbse.sort_value >= public.naco_normalize($2) + ORDER BY mbse.sort_value, mbse.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_authority_refs_pivot(integer[], 
text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-subject-bib-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-subject-bib-pivot-rollback.sql new file mode 100644 index 0000000000..fe2633174c --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-subject-bib-pivot-rollback.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_subject_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_subject_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_subject_entry mbe + JOIN metabib.browse_subject_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_subject_bib_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-text-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-text-rollback.sql new file mode 100644 index 0000000000..5fb3ca6a71 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-text-rollback.sql @@ -0,0 +1,177 @@ +-- Function: metabib.browse(text, text, integer, integer, boolean, bigint, integer) + +-- DROP FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer); + +CREATE OR REPLACE FUNCTION metabib.browse(search_class text, browse_term text, context_org integer DEFAULT NULL::integer, context_loc_group integer DEFAULT NULL::integer, staff boolean DEFAULT false, pivot_id bigint DEFAULT NULL::bigint, result_limit integer DEFAULT 10) + RETURNS SETOF metabib.flat_browse_entry_appearance AS +$BODY$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + search_field INT[]; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + --ver1.1 updated with kmain-806 + -- Get search field int list with search_class + IF search_class = 'id|bibcn' THEN + + SELECT INTO search_class 'call_number'; + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = 'identifier' AND name = 'bibcn'; + + IF pivot_id IS NULL THEN + + pivot_id := metabib.browse_call_number_pivot(browse_term); + + END IF; + ELSE + + SELECT INTO search_field COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class; + + -- First, find the pivot if we were given a browse term but not a pivot. 
+ IF pivot_id IS NULL THEN + + CASE search_class + WHEN 'author' THEN pivot_id := metabib.browse_author_pivot(search_field, browse_term); + WHEN 'title' THEN pivot_id := metabib.browse_title_pivot(search_field, browse_term); + WHEN 'subject' THEN pivot_id := metabib.browse_subject_pivot(search_field, browse_term); + WHEN 'series' THEN pivot_id := metabib.browse_series_pivot(search_field, browse_term); + + END CASE; + END IF; + END IF; + + CASE search_class + WHEN 'author' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_author_entry WHERE id = pivot_id; + WHEN 'title' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_title_entry WHERE id = pivot_id; + WHEN 'subject' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_subject_entry WHERE id = pivot_id; + WHEN 'series' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_series_entry WHERE id = pivot_id; + WHEN 'call_number' THEN + SELECT INTO pivot_sort_value, pivot_sort_fallback + truncated_sort_value, value + FROM metabib.browse_call_number_entry WHERE id = pivot_id; + + END CASE; + + --<< + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_' || search_class || '_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_' || search_class || '_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + )'; + IF search_class != 'call_number' THEN + + core_query := core_query || ' OR EXISTS ( -- are there any authorities using this mbe via the requested fields? 
+ SELECT 1 + FROM metabib.browse_' || search_class || '_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + )'; + + END IF; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ') AND mbe.truncated_sort_value <= ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.truncated_sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ') AND mbe.truncated_sort_value > ' || quote_literal(pivot_sort_value) || --<< + ' ORDER BY mbe.truncated_sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot, + search_class + ) UNION ALL + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot, + search_class + ) ORDER BY row_number DESC; + +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION metabib.browse(text, text, integer, integer, boolean, bigint, integer) + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-title-authority-refs-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-title-authority-refs-pivot-rollback.sql new file mode 100644 index 0000000000..6a0b28fb8c --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-title-authority-refs-pivot-rollback.sql @@ -0,0 +1,23 @@ +-- Function: metabib.browse_title_authority_refs_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.1 updated with kmain-806 + SELECT mbte.id + FROM metabib.browse_title_entry mbte + JOIN metabib.browse_title_entry_simple_heading_map mbteshm ON ( mbteshm.entry = mbte.id ) + JOIN authority.simple_heading ash ON ( mbteshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbte.sort_value >= public.naco_normalize($2) + ORDER BY mbte.sort_value, mbte.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_authority_refs_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-title-bib-pivot-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-title-bib-pivot-rollback.sql new file mode 100644 index 0000000000..55c2e13916 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/metabib-browse-title-bib-pivot-rollback.sql @@ -0,0 +1,21 @@ +-- Function: metabib.browse_title_bib_pivot(integer[], text) + +-- DROP FUNCTION metabib.browse_title_bib_pivot(integer[], text); + +CREATE OR REPLACE FUNCTION metabib.browse_title_bib_pivot(integer[], text) + RETURNS bigint AS +$BODY$ + --ver1.0 + SELECT mbe.id + FROM metabib.browse_title_entry mbe + JOIN 
metabib.browse_title_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$BODY$ + LANGUAGE sql STABLE + COST 100; +ALTER FUNCTION metabib.browse_title_bib_pivot(integer[], text) + OWNER TO evergreen; diff --git a/KCLS/sql/kmain-762/rollback_scripts/public-replace-ampersand-rollback.sql b/KCLS/sql/kmain-762/rollback_scripts/public-replace-ampersand-rollback.sql new file mode 100644 index 0000000000..d457438754 --- /dev/null +++ b/KCLS/sql/kmain-762/rollback_scripts/public-replace-ampersand-rollback.sql @@ -0,0 +1,5 @@ +-- Function: public.replace_ampersand(text) + +DROP FUNCTION public.replace_ampersand(text); + + diff --git a/KCLS/sql/kmain-903/kmain-903-rollback.sql b/KCLS/sql/kmain-903/kmain-903-rollback.sql new file mode 100644 index 0000000000..0005b8b85f --- /dev/null +++ b/KCLS/sql/kmain-903/kmain-903-rollback.sql @@ -0,0 +1,61 @@ +-- Function: authority.calculate_authority_linking(bigint, integer, xml) + +-- DROP FUNCTION authority.calculate_authority_linking(bigint, integer, xml); + +CREATE OR REPLACE FUNCTION authority.calculate_authority_linking(rec_id bigint, rec_control_set integer, rec_marc_xml xml) + RETURNS SETOF authority.authority_linking AS +$BODY$ +DECLARE + acsaf authority.control_set_authority_field%ROWTYPE; + link TEXT; + aal authority.authority_linking%ROWTYPE; +BEGIN + IF rec_control_set IS NULL THEN + -- No control_set on record? Guess at one + SELECT control_set INTO rec_control_set + FROM authority.control_set_authority_field + WHERE tag IN ( + SELECT UNNEST( + XPATH('//*[starts-with(@tag,"1")]/@tag',rec_marc_xml)::TEXT[] + ) + ) LIMIT 1; + + IF NOT FOUND THEN + RAISE WARNING 'Could not even guess at control set for authority record %', rec_id; + RETURN; + END IF; + END IF; + + aal.source := rec_id; + + FOR acsaf IN + SELECT * FROM authority.control_set_authority_field + WHERE control_set = rec_control_set + AND linking_subfield IS NOT NULL + AND main_entry IS NOT NULL + LOOP + link := SUBSTRING( + (XPATH('//*[@tag="' || acsaf.tag || '"]/*[@code="' || + acsaf.linking_subfield || '"]/text()', rec_marc_xml))[1]::TEXT, + '\d+$' + ); + + -- Ignore links that are null, malformed, circular, or point to + -- non-existent authority records. + IF link IS NOT NULL AND link::BIGINT <> rec_id THEN + PERFORM * FROM authority.record_entry WHERE id = link::BIGINT; + IF FOUND THEN + aal.target := link::BIGINT; + aal.field := acsaf.id; + RETURN NEXT aal; + END IF; + END IF; + END LOOP; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100 + ROWS 1000; +ALTER FUNCTION authority.calculate_authority_linking(bigint, integer, xml) + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-903/kmain-903.sql b/KCLS/sql/kmain-903/kmain-903.sql new file mode 100644 index 0000000000..15df1f64c8 --- /dev/null +++ b/KCLS/sql/kmain-903/kmain-903.sql @@ -0,0 +1,66 @@ +BEGIN; + +CREATE OR REPLACE FUNCTION authority.calculate_authority_linking( + rec_id BIGINT, rec_control_set INT, rec_marc_xml XML +) RETURNS SETOF authority.authority_linking AS $func$ +DECLARE + acsaf authority.control_set_authority_field%ROWTYPE; + link TEXT; + aal authority.authority_linking%ROWTYPE; +BEGIN + IF rec_control_set IS NULL THEN + -- No control_set on record? 
Guess at one + SELECT control_set INTO rec_control_set + FROM authority.control_set_authority_field + WHERE tag IN ( + SELECT UNNEST( + XPATH('//*[starts-with(@tag,"1")]/@tag',rec_marc_xml)::TEXT[] + ) + ) LIMIT 1; + + IF NOT FOUND THEN + RAISE WARNING 'Could not even guess at control set for authority record %', rec_id; + RETURN; + END IF; + END IF; + + aal.source := rec_id; + + FOR acsaf IN + SELECT * FROM authority.control_set_authority_field + WHERE control_set = rec_control_set + AND linking_subfield IS NOT NULL + AND main_entry IS NOT NULL + LOOP + -- Loop over the trailing-number contents of all linking subfields + FOR link IN + SELECT SUBSTRING( x::TEXT, '\d+$' ) + FROM UNNEST( + XPATH( + '//*[@tag="' + || acsaf.tag + || '"]/*[@code="' + || acsaf.linking_subfield + || '"]/text()', + rec_marc_xml + ) + ) x + LOOP + + -- Ignore links that are null, malformed, circular, or point to + -- non-existent authority records. + IF link IS NOT NULL AND link::BIGINT <> rec_id THEN + PERFORM * FROM authority.record_entry WHERE id = link::BIGINT; + IF FOUND THEN + aal.target := link::BIGINT; + aal.field := acsaf.id; + RETURN NEXT aal; + END IF; + END IF; + END LOOP; + END LOOP; +END; +$func$ LANGUAGE PLPGSQL; + +COMMIT; + diff --git a/KCLS/sql/kmain-972/public.export-generate-ids.sql b/KCLS/sql/kmain-972/public.export-generate-ids.sql new file mode 100644 index 0000000000..caa4d5bef7 --- /dev/null +++ b/KCLS/sql/kmain-972/public.export-generate-ids.sql @@ -0,0 +1,34 @@ +-- Function: public.export_generate_ids() + +-- DROP FUNCTION public.export_generate_ids(); + +CREATE OR REPLACE FUNCTION public.export_generate_ids(start_date date, end_date date) + RETURNS SETOF bigint AS +$BODY$ +-- Generates a list of ids for exporting based on the following criteria +-- For a record to be exported it MUST MEET ALL of the following conditions +-- 1.The record must have at least one LDR field and byte 05 of every instance of that field must not be ‘d’. +-- AND +-- 2. The record must have at least one 001 field and at least one of those fields should contain “oc” +-- OR has at least one 035 field with "WaOLN" in subfield "a" +-- AND +-- 3. The record must have at least one of the following fields 086 or 092 or 099 and every instance of each of these fields must not contain “ON ORDER” +-- AND +-- 4. The record must have items attached OR the record must have at least one 998 field with subfield d that has one of the following values ‘d’ or ‘t’ or ‘v’ or ‘w’ or ‘x’ or ‘y’ or ‘1’ +-- AND +-- 5. The records Cat Date must fall between two dates supplied by the user. 
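+--
+-- For example, to list the exportable records cataloged in January 2015
+-- (illustrative date range; callers supply whatever start/end dates they need):
+--   SELECT * FROM public.export_generate_ids('2015-01-01', '2015-01-31');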
+ + + SELECT id + FROM biblio.record_entry + WHERE public.export_ids_001(id) AND public.export_ids_086_092_099(id) AND public.export_ids_LDR(id) + AND public.export_ids_998(id) AND public.export_ids_cat_date(id, start_date, end_date); + + +$BODY$ + LANGUAGE sql STABLE; +ALTER FUNCTION public.export_generate_ids() + OWNER TO evergreen; + + + diff --git a/KCLS/sql/kmain-972/rollback_scripts/public.export-generate-ids-params-rollback.sql b/KCLS/sql/kmain-972/rollback_scripts/public.export-generate-ids-params-rollback.sql new file mode 100644 index 0000000000..80640797f4 --- /dev/null +++ b/KCLS/sql/kmain-972/rollback_scripts/public.export-generate-ids-params-rollback.sql @@ -0,0 +1,4 @@ +-- Function: public.export_generate_ids() + +DROP FUNCTION public.export_generate_ids(start_date date, end_date date); + diff --git a/KCLS/sql/kmain-976/kmain-976-constraints-rollback.sql b/KCLS/sql/kmain-976/kmain-976-constraints-rollback.sql new file mode 100644 index 0000000000..9678170d48 --- /dev/null +++ b/KCLS/sql/kmain-976/kmain-976-constraints-rollback.sql @@ -0,0 +1,5 @@ +ALTER TABLE metabib.browse_author_entry DROP CONSTRAINT browse_author_entry_sort_value_value_key; +ALTER TABLE metabib.browse_call_number_entry DROP CONSTRAINT browse_call_number_entry_sort_value_value_key; +ALTER TABLE metabib.browse_series_entry DROP CONSTRAINT browse_series_entry_sort_value_value_key; +ALTER TABLE metabib.browse_subject_entry DROP CONSTRAINT browse_subject_entry_sort_value_value_key; +ALTER TABLE metabib.browse_title_entry DROP CONSTRAINT browse_title_entry_sort_value_value_key; diff --git a/KCLS/sql/kmain-976/kmain-976-constraints.sql b/KCLS/sql/kmain-976/kmain-976-constraints.sql new file mode 100644 index 0000000000..77e2361fec --- /dev/null +++ b/KCLS/sql/kmain-976/kmain-976-constraints.sql @@ -0,0 +1,14 @@ +ALTER TABLE metabib.browse_author_entry + ADD CONSTRAINT browse_author_entry_sort_value_value_key UNIQUE(sort_value, value); + +ALTER TABLE metabib.browse_call_number_entry + ADD CONSTRAINT browse_call_number_entry_sort_value_value_key UNIQUE(sort_value, value); + +ALTER TABLE metabib.browse_series_entry + ADD CONSTRAINT browse_series_entry_sort_value_value_key UNIQUE(sort_value, value); + +ALTER TABLE metabib.browse_subject_entry + ADD CONSTRAINT browse_subject_entry_sort_value_value_key UNIQUE(sort_value, value); + +ALTER TABLE metabib.browse_title_entry + ADD CONSTRAINT browse_title_entry_sort_value_value_key UNIQUE(sort_value, value); diff --git a/KCLS/sql/kmain-976/kmain-976-indexing_ingest_or_delete-rollback.sql b/KCLS/sql/kmain-976/kmain-976-indexing_ingest_or_delete-rollback.sql new file mode 100644 index 0000000000..02590becc7 --- /dev/null +++ b/KCLS/sql/kmain-976/kmain-976-indexing_ingest_or_delete-rollback.sql @@ -0,0 +1,151 @@ +-- Function: authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashps authority.simple_heading_plus%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should 
remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashps IN SELECT * FROM authority.simple_heading_plus_set(NEW.marc) LOOP + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashps.record, ashps.atag, ashps.value, ashps.sort_value); + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- Get the search_class + SELECT INTO search_class cmf.field_class + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashps.atag; + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = ashps.value AND sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = ashps.value AND sort_value = 
ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-976/kmain-976-indexing_ingest_or_delete.sql b/KCLS/sql/kmain-976/kmain-976-indexing_ingest_or_delete.sql new file mode 100644 index 0000000000..11560a69a7 --- /dev/null +++ b/KCLS/sql/kmain-976/kmain-976-indexing_ingest_or_delete.sql @@ -0,0 +1,156 @@ +-- Function: authority.indexing_ingest_or_delete() + +-- DROP FUNCTION authority.indexing_ingest_or_delete(); + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete() + RETURNS trigger AS +$BODY$ +DECLARE + ashps authority.simple_heading_plus%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; + search_class text; + value_prepped TEXT; + field_id integer; +BEGIN + --ver1.2 updated with kmain-821 + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashps IN SELECT * FROM authority.simple_heading_plus_set(NEW.marc) LOOP + + -- Get the search_class + SELECT INTO search_class, field_id cmf.field_class, cmf.id + FROM authority.control_set_auth_field_metabib_field_map_refs AS acsafmfmr + JOIN config.metabib_field AS cmf + ON acsafmfmr.metabib_field = cmf.id + WHERE acsafmfmr.authority_field = ashps.atag; + + value_prepped := metabib.browse_normalize(ashps.original_text, field_id); + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashps.record, ashps.atag, value_prepped, ashps.sort_value); + + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + -- CASE statement switches on search_class to use the correct browse table (author, series, subject, title) + CASE search_class + WHEN 'author' THEN + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'series' THEN + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'subject' THEN + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + WHEN 'title' THEN + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ashps.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( ashps.original_text, ashps.sort_value, substr(ashps.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO 
metabib.browse_title_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + ELSE + -- mainly to handle when search_class is 'keyword' + END CASE; + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION authority.indexing_ingest_or_delete() + OWNER TO evergreen; + diff --git a/KCLS/sql/kmain-976/kmain-976-reingest_metabib_field_entries-rollback.sql b/KCLS/sql/kmain-976/kmain-976-reingest_metabib_field_entries-rollback.sql new file mode 100644 index 0000000000..b05a741a04 --- /dev/null +++ b/KCLS/sql/kmain-976/kmain-976-reingest_metabib_field_entries-rollback.sql @@ -0,0 +1,197 @@ + +n: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.0 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT 
b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + + + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + 
ELSE + END CASE; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO public; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO bbonner; + diff --git a/KCLS/sql/kmain-976/kmain-976-reingest_metabib_field_entries.sql b/KCLS/sql/kmain-976/kmain-976-reingest_metabib_field_entries.sql new file mode 100644 index 0000000000..f5b37cc8eb --- /dev/null +++ b/KCLS/sql/kmain-976/kmain-976-reingest_metabib_field_entries.sql @@ -0,0 +1,196 @@ +-- Function: metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + +-- DROP FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean); + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries(bib_id bigint, skip_facet boolean DEFAULT false, skip_browse boolean DEFAULT false, skip_search boolean DEFAULT false) + RETURNS void AS +$BODY$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; + field_class TEXT; +BEGIN + --ver1.0 + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_author_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_title_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_subject_entry_def_map WHERE source = bib_id; + DELETE FROM metabib.browse_series_entry_def_map WHERE source 
= bib_id; + DELETE FROM metabib.browse_call_number_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + + --ind_data.field_class -- author, title, subject, etc + + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + IF char_length(value_prepped) > 0 THEN + CASE ind_data.field_class + + WHEN 'author' THEN + + SELECT INTO mbe_row * FROM metabib.browse_author_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_author_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_author_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_author_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'title' THEN + + SELECT INTO mbe_row * FROM metabib.browse_title_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_title_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_title_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_title_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'subject' THEN + + SELECT INTO mbe_row * FROM metabib.browse_subject_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_subject_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_subject_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_subject_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'series' THEN + + SELECT INTO mbe_row * FROM metabib.browse_series_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_series_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_series_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_series_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + + WHEN 'call_number' THEN + + SELECT INTO mbe_row * FROM metabib.browse_call_number_entry + WHERE sort_value = ind_data.sort_value; + + IF FOUND THEN + 
mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_call_number_entry + ( value, sort_value, truncated_sort_value ) VALUES + ( value_prepped, ind_data.sort_value, substr(ind_data.sort_value, 1, 2700) ); + + mbe_id := CURRVAL('metabib.browse_call_number_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_call_number_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + ELSE + END CASE; + END IF; + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$BODY$ + LANGUAGE plpgsql VOLATILE + COST 100; +ALTER FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) + OWNER TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO public; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO evergreen; +GRANT EXECUTE ON FUNCTION metabib.reingest_metabib_field_entries(bigint, boolean, boolean, boolean) TO bbonner; + diff --git a/KCLS/sql/kmain-976/kmain-976-upgrade-prep.sql b/KCLS/sql/kmain-976/kmain-976-upgrade-prep.sql new file mode 100644 index 0000000000..c05b4ae68b --- /dev/null +++ b/KCLS/sql/kmain-976/kmain-976-upgrade-prep.sql @@ -0,0 +1,36 @@ +--THIS IS FOR CLEANING UP AN EXISTING ENVIRONMENT +--REINGESTING BIB/AUTH WILL POPULATE THESE BACK + +--BROWSE AUTHOR ENTRY +DELETE FROM metabib.browse_author_entry_simple_heading_map; +DELETE FROM metabib.browse_author_entry_def_map; +DELETE FROM metabib.browse_author_entry; + +--BROWSE CALL NUMBER ENTRY +DELETE FROM metabib.browse_call_number_entry_def_map; +DELETE FROM metabib.browse_call_number_entry; + +--BROWSE SERIES ENTRY +DELETE FROM metabib.browse_series_entry_simple_heading_map; +DELETE FROM metabib.browse_series_entry_def_map; +DELETE FROM metabib.browse_series_entry; + +--BROWSE TITLE ENTRY +DELETE FROM metabib.browse_title_entry_simple_heading_map; +DELETE FROM metabib.browse_title_entry_def_map; +DELETE FROM metabib.browse_title_entry; + +--BROWSE SUBJECT ENTRY +DELETE FROM metabib.browse_subject_entry_simple_heading_map; +DELETE FROM metabib.browse_subject_entry_def_map; +DELETE FROM metabib.browse_subject_entry; + +--AUTHORITY +DELETE FROM authority.simple_heading; +DELETE FROM authority.full_rec; + +--AUTH-AUTH LINKING +DELETE FROM authority.authority_linking; + +--AUTH-BIB LINKING +DELETE FROM authority.bib_linking; diff --git a/KCLS/sql/kmain221/0815.schema.config-metabib-interauthority.sql b/KCLS/sql/kmain221/0815.schema.config-metabib-interauthority.sql new file mode 100644 index 0000000000..8ec25d20a3 --- /dev/null +++ b/KCLS/sql/kmain221/0815.schema.config-metabib-interauthority.sql @@ -0,0 +1,126 @@ +BEGIN; + +ALTER TABLE authority.control_set_authority_field + ADD COLUMN 
linking_subfield CHAR(1); + +UPDATE authority.control_set_authority_field + SET linking_subfield = '0' WHERE main_entry IS NOT NULL; + +CREATE TABLE authority.authority_linking ( + id BIGSERIAL PRIMARY KEY, + source BIGINT REFERENCES authority.record_entry (id) NOT NULL, + target BIGINT REFERENCES authority.record_entry (id) NOT NULL, + field INT REFERENCES authority.control_set_authority_field (id) NOT NULL +); + +-- Given an authority record's ID, control set ID (if known), and marc::XML, +-- return all links to other authority records in the form of rows that +-- can be inserted into authority.authority_linking. +CREATE OR REPLACE FUNCTION authority.calculate_authority_linking( + rec_id BIGINT, rec_control_set INT, rec_marc_xml XML +) RETURNS SETOF authority.authority_linking AS $func$ +DECLARE + acsaf authority.control_set_authority_field%ROWTYPE; + link TEXT; + aal authority.authority_linking%ROWTYPE; +BEGIN + IF rec_control_set IS NULL THEN + -- No control_set on record? Guess at one + SELECT control_set INTO rec_control_set + FROM authority.control_set_authority_field + WHERE tag IN ( + SELECT UNNEST( + XPATH('//*[starts-with(@tag,"1")]/@tag',rec_marc_xml::XML)::TEXT[] + ) + ) LIMIT 1; + + IF NOT FOUND THEN + RAISE WARNING 'Could not even guess at control set for authority record %', rec_id; + RETURN; + END IF; + END IF; + + aal.source := rec_id; + + FOR acsaf IN + SELECT * FROM authority.control_set_authority_field + WHERE control_set = rec_control_set + AND linking_subfield IS NOT NULL + AND main_entry IS NOT NULL + LOOP + link := SUBSTRING( + (XPATH('//*[@tag="' || acsaf.tag || '"]/*[@code="' || + acsaf.linking_subfield || '"]/text()', rec_marc_xml))[1]::TEXT, + '\d+$' + ); + + -- Ignore links that are null, malformed, circular, or point to + -- non-existent authority records. + IF link IS NOT NULL AND link::BIGINT <> rec_id THEN + PERFORM * FROM authority.record_entry WHERE id = link::BIGINT; + IF FOUND THEN + aal.target := link::BIGINT; + aal.field := acsaf.id; + RETURN NEXT aal; + END IF; + END IF; + END LOOP; +END; +$func$ LANGUAGE PLPGSQL; + + +-- AFTER UPDATE OR INSERT trigger for authority.record_entry +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete () RETURNS TRIGGER AS $func$ +BEGIN + + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? + + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? 
+ PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + SELECT record, atag, value, sort_value FROM authority.simple_heading_set(NEW.marc); + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$func$ LANGUAGE PLPGSQL; + +COMMIT; diff --git a/KCLS/sql/kmain221/0816.schema.bib-auth-browse.sql b/KCLS/sql/kmain221/0816.schema.bib-auth-browse.sql new file mode 100644 index 0000000000..74506a3da7 --- /dev/null +++ b/KCLS/sql/kmain221/0816.schema.bib-auth-browse.sql @@ -0,0 +1,7527 @@ +BEGIN; + +-- Section 1: authority.control_set_authority_field table -- SAFE + +-- To avoid problems with altering a table column after doing an +-- update. +ALTER TABLE authority.control_set_authority_field + DISABLE TRIGGER ALL; + +ALTER TABLE authority.control_set_authority_field + ADD COLUMN display_sf_list TEXT; + +UPDATE authority.control_set_authority_field + SET display_sf_list = REGEXP_REPLACE(sf_list, '[w254]', '', 'g'); + +ALTER TABLE authority.control_set_authority_field + ALTER COLUMN display_sf_list SET NOT NULL; + +ALTER TABLE authority.control_set_authority_field + ENABLE TRIGGER ALL; + +-- Section 2: metabib.browse_entry_def_map and config.metabib_field -- SAFE +-- I think this breaks it because in KCLS, they changed some of the formats to kcls and changed the xpath. 
+ +ALTER TABLE metabib.browse_entry_def_map + ADD COLUMN authority BIGINT REFERENCES authority.record_entry (id) + ON DELETE SET NULL; + +ALTER TABLE config.metabib_field ADD COLUMN authority_xpath TEXT; +ALTER TABLE config.metabib_field ADD COLUMN browse_sort_xpath TEXT; + +UPDATE config.metabib_field + SET authority_xpath = '//@xlink:href' + WHERE + format = 'mods32' AND + field_class IN ('subject','series','title','author') AND + browse_field IS TRUE; + +UPDATE config.metabib_field + SET authority_xpath = '//@xlink:href' + WHERE + format = 'kcls' AND + field_class IN ('subject','series','title','author') AND + browse_field IS TRUE; + +ALTER TYPE metabib.field_entry_template ADD ATTRIBUTE authority BIGINT; +ALTER TYPE metabib.field_entry_template ADD ATTRIBUTE sort_value TEXT; + +-- Section 3: metabib.reingest_metabib_field_entries function + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries( bib_id BIGINT, skip_facet BOOL DEFAULT FALSE, skip_browse BOOL DEFAULT FALSE, skip_search BOOL DEFAULT FALSE ) RETURNS VOID AS $func$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
+ + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +-- Section 4: biblio.extract_metabib_field_entry function + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry ( rid BIGINT, default_joiner TEXT ) RETURNS SETOF metabib.field_entry_template AS $func$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + curr_text := ARRAY_TO_STRING( + oils_xpath( '//text()', + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data ise returned from oils_xpath (above) in UTF-8, not entity encoded + REGEXP_REPLACE( -- This escapes embeded [^<]+)(<)([^>]+<)$re$, + E'\\1<\\3', + 'g' + ), + '&(?!amp;)', + '&', + 'g' + ) + ), + ' ' + ); + + CONTINUE WHEN curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g')); + output_row.sort_value := + public.search_normalize(sort_value); + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g')); + + output_row.facet_field = TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')); + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$func$ LANGUAGE PLPGSQL; + +-- Section 6: config.xml_transform + +-- 953.data.MODS32-xsl.sql +UPDATE config.xml_transform SET xslt=$$ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BK + SE + + + BK + MM + CF + MP + VM + MU + + + + + + + + + b + afgk + + + + + abfgk + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + b + b + afgk + + + + + + + + + + + + + + b + afgk + + + + + abfgk + + + + + + <xsl:value-of select="$title"/> + + + + + + + b + b + afgk + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + 
+<!-- The remainder of this UPDATE statement is the MODS 3.2 XSLT stylesheet body
+     from 953.data.MODS32-xsl.sql; its markup was lost in extraction and only
+     scattered text nodes survive, so the stylesheet is not reproduced here. -->
$$ WHERE name = 'mods32';
+
+-- Section 7: config.xml_transform
+
+-- 954.data.MODS33-xsl.sql
+UPDATE config.xml_transform SET xslt=$$
[The body of the "mods33" (MARC21-to-MODS 3.3) XSLT stylesheet is not recoverable here: its XML markup was stripped during extraction, leaving only stray text nodes such as subfield-code lists, audience terms, carrier and form vocabulary ("braille", "print", "microfiche", ...), and escaped xsl: fragments.]
$$ WHERE name = 'mods33'; + +-- Section 8: config.global_flag + +INSERT INTO config.global_flag (name, value, enabled, label) VALUES +( + 'opac.browse.warnable_regexp_per_class', + '{"title": "^(a|the|an)\\s"}', + FALSE, + oils_i18n_gettext( + 'opac.browse.warnable_regexp_per_class', + 'Map of search classes to regular expressions to warn user about leading articles.', + 'cgf', + 'label' + ) +), +( + 'opac.browse.holdings_visibility_test_limit', + '100', + TRUE, + oils_i18n_gettext( + 'opac.browse.holdings_visibility_test_limit', + 'Don''t look for more than this number of records with holdings when displaying browse headings with visible record counts.', + 'cgf', + 'label' + ) +); + +-- Section 10: metabib.browse_entry and metabib.browse_entry_def_map + +ALTER TABLE metabib.browse_entry DROP CONSTRAINT browse_entry_value_key; +ALTER TABLE metabib.browse_entry ADD COLUMN sort_value TEXT; +DELETE FROM metabib.browse_entry_def_map; -- Yeah. +DELETE FROM metabib.browse_entry WHERE sort_value IS NULL; +ALTER TABLE metabib.browse_entry ALTER COLUMN sort_value SET NOT NULL; +ALTER TABLE metabib.browse_entry ADD UNIQUE (sort_value, value); +DROP TRIGGER IF EXISTS mbe_sort_value ON metabib.browse_entry; + +CREATE INDEX browse_entry_sort_value_idx + ON metabib.browse_entry USING BTREE (sort_value); + +-- NOTE If I understand ordered indices correctly, an index on sort_value DESC +-- is not actually needed, even though we do have a query that does ORDER BY +-- on this column in that direction. The previous index serves for both +-- directions, and ordering in an index is only helpful for multi-column +-- indices, I think. See http://www.postgresql.org/docs/9.1/static/indexes-ordering.html + +-- CREATE INDEX CONCURRENTLY browse_entry_sort_value_idx_desc +-- ON metabib.browse_entry USING BTREE (sort_value DESC); + +-- Section 11: metabib.flat_browse_entry_appearance + +CREATE TYPE metabib.flat_browse_entry_appearance AS ( + browse_entry BIGINT, + value TEXT, + fields TEXT, + authorities TEXT, + sources INT, -- visible ones, that is + row_number INT, -- internal use, sort of + accurate BOOL, -- Count in sources field is accurate? Not + -- if we had more than a browse superpage + -- of records to look at. 
+ pivot_point BIGINT +); + +-- Section 12: metabib.browse_pivot function + +CREATE OR REPLACE FUNCTION metabib.browse_pivot( + search_field INT[], + browse_term TEXT +) RETURNS BIGINT AS $p$ +DECLARE + id BIGINT; +BEGIN + SELECT INTO id mbe.id FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_def_map mbedm ON ( + mbedm.entry = mbe.id AND + mbedm.def = ANY(search_field) + ) + WHERE mbe.sort_value >= public.search_normalize(browse_term) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; + + RETURN id; +END; +$p$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION metabib.staged_browse( + query TEXT, + fields INT[], + context_org INT, + context_locations INT[], + staff BOOL, + browse_superpage_size INT, + count_up_from_zero BOOL, -- if false, count down from -1 + result_limit INT, + next_pivot_pos INT +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + -- Gather aggregate data based on the MBE row we're looking at now + SELECT INTO all_records, result_row.authorities, result_row.fields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT def), $$,$$) + FROM metabib.browse_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + result_row.sources := 0; + + full_end := ARRAY_LENGTH(all_records, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_records[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + IF result_row.sources > 0 THEN + -- We've got a browse entry with visible holdings. Yay. + + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. 
+ + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.accurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. + row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$p$ LANGUAGE PLPGSQL; + +-- Section 13: metabib.browse function + +CREATE OR REPLACE FUNCTION metabib.browse( + search_field INT[], + browse_term TEXT, + context_org INT DEFAULT NULL, + context_loc_group INT DEFAULT NULL, + staff BOOL DEFAULT FALSE, + pivot_id BIGINT DEFAULT NULL, + result_limit INT DEFAULT 10 +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + pivot_id := metabib.browse_pivot(search_field, browse_term); + END IF; + + SELECT INTO pivot_sort_value, pivot_sort_fallback + sort_value, value FROM metabib.browse_entry WHERE id = pivot_id; + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. 
+ core_query := ' + SELECT + mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_entry mbe + WHERE EXISTS (SELECT 1 FROM metabib.browse_entry_def_map mbedm WHERE + mbedm.entry = mbe.id AND + mbedm.def = ANY(' || quote_literal(search_field) || ') + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.sort_value <= ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.sort_value > ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. + RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot + ) ORDER BY row_number DESC; + +END; +$p$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION metabib.browse( + search_class TEXT, + browse_term TEXT, + context_org INT DEFAULT NULL, + context_loc_group INT DEFAULT NULL, + staff BOOL DEFAULT FALSE, + pivot_id BIGINT DEFAULT NULL, + result_limit INT DEFAULT 10 +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +BEGIN + RETURN QUERY SELECT * FROM metabib.browse( + (SELECT COALESCE(ARRAY_AGG(id), ARRAY[]::INT[]) + FROM config.metabib_field WHERE field_class = search_class), + browse_term, + context_org, + context_loc_group, + staff, + pivot_id, + result_limit + ); +END; +$p$ LANGUAGE PLPGSQL; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:relatedItem[@type="series"]/mods32:titleInfo[@type="nfi"]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'series' AND name = 'seriestitle' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and not (@type)]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL, + browse_field = TRUE +WHERE + field_class = 'title' AND name = 'proper' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='alternative-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'alternative' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='uniform-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'uniform' AND format = 'mods32'; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and (@type='translated-nfi')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'translated' AND format = 'mods32'; + +-- This keeps extra terms like "creator" out of browse headings. 
+UPDATE config.metabib_field + SET browse_xpath = $$//*[local-name()='namePart']$$ -- vim */ + WHERE + browse_field AND + browse_xpath IS NULL AND + field_class = 'author'; + +-- Section 14: config.org_unit_setting_type + +INSERT INTO config.org_unit_setting_type ( + name, label, grp, description, datatype +) VALUES ( + 'opac.browse.pager_shortcuts', + 'Paging shortcut links for OPAC Browse', + 'opac', + 'The characters in this string, in order, will be used as shortcut links for quick paging in the OPAC browse interface. Any sequence surrounded by asterisks will be taken as a whole label, not split into individual labels at the character level, but only the first character will serve as the basis of the search.', + 'string' +); + +COMMIT; + +--SELECT metabib.reingest_metabib_field_entries(id, TRUE, FALSE, TRUE) +-- FROM biblio.record_entry; diff --git a/KCLS/sql/kmain221/0821.function.browse-normalize-timing.sql b/KCLS/sql/kmain221/0821.function.browse-normalize-timing.sql new file mode 100644 index 0000000000..1c9591aa83 --- /dev/null +++ b/KCLS/sql/kmain221/0821.function.browse-normalize-timing.sql @@ -0,0 +1,95 @@ +BEGIN; + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries( bib_id BIGINT, skip_facet BOOL DEFAULT FALSE, skip_browse BOOL DEFAULT FALSE, skip_search BOOL DEFAULT FALSE ) RETURNS VOID AS $func$ +DECLARE + fclass RECORD; + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + mbe_txt TEXT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. 
+ mbe_txt := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry WHERE value = mbe_txt; + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry (value) VALUES (mbe_txt); + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source) + VALUES (mbe_id, ind_data.field, ind_data.source); + END IF; + + -- Avoid inserting duplicate rows, but retain granularity of being + -- able to search browse fields with "starts with" type operators + -- (for example, for titles of songs in music albums) + IF (ind_data.search_field OR ind_data.browse_field) AND NOT b_skip_search THEN + EXECUTE 'SELECT 1 FROM metabib.' || ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +COMMIT; + diff --git a/KCLS/sql/kmain221/0837.schema.browse-auth-linking.plus-joiner.sql b/KCLS/sql/kmain221/0837.schema.browse-auth-linking.plus-joiner.sql new file mode 100644 index 0000000000..278a5fddc6 --- /dev/null +++ b/KCLS/sql/kmain221/0837.schema.browse-auth-linking.plus-joiner.sql @@ -0,0 +1,985 @@ +-- Evergreen DB patch 0837.schema.browse-auth-linking.plus-joiner.sql +-- +-- In this upgrade script we complete inter-subfield joiner support, so that +-- subject components can be separated by " -- ", for instance. That's the +-- easy part. +-- +-- We also add the ability to browse by in-use authority main entries and find +-- bibs that use unauthorized versions of the authority's value, by string matching. +-- +BEGIN; + +ALTER TABLE config.metabib_field ADD COLUMN joiner TEXT; +UPDATE config.metabib_field SET joiner = ' -- ' WHERE field_class = 'subject' AND name NOT IN ('name', 'complete'); + +-- To avoid problems with altering a table column after doing an +-- update. 
+ALTER TABLE authority.control_set_authority_field DISABLE TRIGGER ALL; + +ALTER TABLE authority.control_set_authority_field ADD COLUMN joiner TEXT; +UPDATE authority.control_set_authority_field SET joiner = ' -- ' WHERE tag LIKE ANY (ARRAY['_4_','_5_','_8_']); + +ALTER TABLE authority.control_set_authority_field ENABLE TRIGGER ALL; + +-- Seed data will be generated from class <-> axis mapping +CREATE TABLE authority.control_set_bib_field_metabib_field_map ( + id SERIAL PRIMARY KEY, + bib_field INT NOT NULL REFERENCES authority.control_set_bib_field (id) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + metabib_field INT NOT NULL REFERENCES config.metabib_field (id) ON UPDATE CASCADE ON DELETE CASCADE DEFERRABLE INITIALLY DEFERRED, + CONSTRAINT a_bf_mf_map_once UNIQUE (bib_field, metabib_field) +); + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_main AS + SELECT DISTINCT b.authority_field, m.metabib_field + FROM authority.control_set_bib_field_metabib_field_map m JOIN authority.control_set_bib_field b ON (b.id = m.bib_field); +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_main IS $$metabib fields for main entry auth fields$$; + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_refs_only AS + SELECT DISTINCT a.id AS authority_field, m.metabib_field + FROM authority.control_set_authority_field a + JOIN authority.control_set_authority_field ame ON (a.main_entry = ame.id) + JOIN authority.control_set_bib_field b ON (b.authority_field = ame.id) + JOIN authority.control_set_bib_field_metabib_field_map mf ON (mf.bib_field = b.id) + JOIN authority.control_set_auth_field_metabib_field_map_main m ON (ame.id = m.authority_field); +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_refs_only IS $$metabib fields for NON-main entry auth fields$$; + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_refs AS + SELECT * FROM authority.control_set_auth_field_metabib_field_map_main + UNION + SELECT * FROM authority.control_set_auth_field_metabib_field_map_refs_only; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_refs IS $$metabib fields for all auth fields$$; + + +-- blind refs only is probably what we want for lookup in bib/auth browse +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_refs_only AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_refs_only r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_refs_only IS $$metabib fields for NON-main entry auth fields that can't be linked to other records$$; -- ' + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_refs AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_refs r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_refs IS $$metabib fields for all auth fields that can't be linked to other records$$; -- ' + +CREATE VIEW authority.control_set_auth_field_metabib_field_map_blind_main AS + SELECT r.* + FROM authority.control_set_auth_field_metabib_field_map_main r + JOIN authority.control_set_authority_field a ON (r.authority_field = a.id) + WHERE linking_subfield IS NULL; +COMMENT ON VIEW authority.control_set_auth_field_metabib_field_map_blind_main IS $$metabib fields for main entry 
auth fields that can't be linked to other records$$; -- ' + +CREATE OR REPLACE FUNCTION authority.normalize_heading( marcxml TEXT, no_thesaurus BOOL ) RETURNS TEXT AS $func$ +DECLARE + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + sf_node TEXT; + tag_node TEXT; + thes_code TEXT; + cset INT; + heading_text TEXT; + tmp_text TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + thes_code := vandelay.marc21_extract_fixed_field(marcxml,'Subj'); + IF thes_code IS NULL THEN + thes_code := '|'; + ELSIF thes_code = 'z' THEN + thes_code := COALESCE( oils_xpath_string('//*[@tag="040"]/*[@code="f"][1]', marcxml), '' ); + END IF; + + heading_text := ''; + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset AND main_entry IS NULL LOOP + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + first_sf := TRUE; + + FOR tag_node IN SELECT unnest(oils_xpath('//*[@tag="'||tag_used||'"]',marcxml)) LOOP + FOR sf_node IN SELECT unnest(oils_xpath('./*[contains("'||acsaf.sf_list||'",@code)]',tag_node)) LOOP + + tmp_text := oils_xpath_string('.', sf_node); + sf := oils_xpath_string('./@code', sf_node); + + IF first_sf AND tmp_text IS NOT NULL AND nfi_used IS NOT NULL THEN + + tmp_text := SUBSTRING( + tmp_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tag_node), + $$\D+$$, + '', + 'g' + ), + '' + )::INT, + 0 + ) + 1 + ); + + END IF; + + first_sf := FALSE; + + IF tmp_text IS NOT NULL AND tmp_text <> '' THEN + heading_text := heading_text || E'\u2021' || sf || ' ' || tmp_text; + END IF; + END LOOP; + + EXIT WHEN heading_text <> ''; + END LOOP; + + EXIT WHEN heading_text <> ''; + END LOOP; + + IF heading_text <> '' THEN + IF no_thesaurus IS TRUE THEN + heading_text := tag_used || ' ' || public.naco_normalize(heading_text); + ELSE + heading_text := tag_used || '_' || COALESCE(nfi_used,'-') || '_' || thes_code || ' ' || public.naco_normalize(heading_text); + END IF; + ELSE + heading_text := 'NOHEADING_' || thes_code || ' ' || MD5(marcxml); + END IF; + + RETURN heading_text; +END; +$func$ LANGUAGE PLPGSQL IMMUTABLE; + +CREATE OR REPLACE FUNCTION authority.simple_heading_set( marcxml TEXT ) RETURNS SETOF authority.simple_heading AS $func$ +DECLARE + res authority.simple_heading%ROWTYPE; + acsaf authority.control_set_authority_field%ROWTYPE; + tag_used TEXT; + nfi_used TEXT; + sf TEXT; + cset INT; + heading_text TEXT; + joiner_text TEXT; + sort_text TEXT; + tmp_text TEXT; + tmp_xml TEXT; + first_sf BOOL; + auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT; +BEGIN + + SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id; + + IF cset IS NULL THEN + SELECT control_set INTO cset + FROM authority.control_set_authority_field + WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[])) + LIMIT 1; + END IF; + + res.record := auth_id; + + FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset LOOP + + res.atag := 
acsaf.id; + tag_used := acsaf.tag; + nfi_used := acsaf.nfi; + joiner_text := COALESCE(acsaf.joiner, ' '); + + FOR tmp_xml IN SELECT UNNEST(XPATH('//*[@tag="'||tag_used||'"]', marcxml::XML)) LOOP + + heading_text := COALESCE( + oils_xpath_string('./*[contains("'||acsaf.sf_list||'",@code)]', tmp_xml::TEXT, joiner_text), + '' + ); + + IF nfi_used IS NOT NULL THEN + + sort_text := SUBSTRING( + heading_text FROM + COALESCE( + NULLIF( + REGEXP_REPLACE( + oils_xpath_string('./@ind'||nfi_used, tmp_xml::TEXT), + $$\D+$$, + '', + 'g' + ), + '' + )::INT, + 0 + ) + 1 + ); + + ELSE + sort_text := heading_text; + END IF; + + IF heading_text IS NOT NULL AND heading_text <> '' THEN + res.value := heading_text; + res.sort_value := public.naco_normalize(sort_text); + res.index_vector = to_tsvector('keyword'::regconfig, res.sort_value); + RETURN NEXT res; + END IF; + + END LOOP; + + END LOOP; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL IMMUTABLE; + +CREATE TABLE metabib.browse_entry_simple_heading_map ( + id BIGSERIAL PRIMARY KEY, + entry BIGINT REFERENCES metabib.browse_entry (id), + simple_heading BIGINT REFERENCES authority.simple_heading (id) ON DELETE CASCADE +); +CREATE INDEX browse_entry_sh_map_entry_idx ON metabib.browse_entry_simple_heading_map (entry); +CREATE INDEX browse_entry_sh_map_sh_idx ON metabib.browse_entry_simple_heading_map (simple_heading); + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry ( rid BIGINT, default_joiner TEXT ) RETURNS SETOF metabib.field_entry_template AS $func$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data is returned from oils_xpath (above) in UTF-8, not entity encoded + REGEXP_REPLACE( -- This escapes embedded <s + xml_node, + $re$(>[^<]+)(<)([^>]+<)$re$, + E'\\1&lt;\\3', + 'g' + ), + '&(?!amp;)', + '&amp;', + 'g' + ), + E'\\s+', + ' ', + 'g' + ) + ), ' '), ''), + joiner + ); + + CONTINUE WHEN curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g')); + output_row.sort_value := + public.naco_normalize(sort_value); + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g')); + + output_row.facet_field = TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')); + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$func$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE + FUNCTION metabib.autosuggest_prepare_tsquery(orig TEXT) RETURNS TEXT[] AS +$$ +DECLARE + orig_ended_in_space BOOLEAN; + result RECORD; + plain TEXT; + normalized TEXT; +BEGIN + orig_ended_in_space := orig ~ E'\\s$'; + + orig := ARRAY_TO_STRING( + evergreen.regexp_split_to_array(orig, E'\\W+'), ' ' + ); + + normalized := public.naco_normalize(orig); -- also trim()s + plain := trim(orig); + + IF NOT orig_ended_in_space THEN + plain := plain || ':*'; + normalized := normalized || ':*'; + END IF; + + plain := ARRAY_TO_STRING( +
evergreen.regexp_split_to_array(plain, E'\\s+'), ' & ' + ); + normalized := ARRAY_TO_STRING( + evergreen.regexp_split_to_array(normalized, E'\\s+'), ' & ' + ); + + RETURN ARRAY[normalized, plain]; +END; +$$ LANGUAGE PLPGSQL; + +ALTER TYPE metabib.flat_browse_entry_appearance ADD ATTRIBUTE sees TEXT; +ALTER TYPE metabib.flat_browse_entry_appearance ADD ATTRIBUTE asources INT; +ALTER TYPE metabib.flat_browse_entry_appearance ADD ATTRIBUTE aaccurate TEXT; + +CREATE OR REPLACE FUNCTION metabib.browse_bib_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_def_map mbedm ON ( + mbedm.entry = mbe.id + AND mbedm.def = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$p$ LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION metabib.browse_authority_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$p$ LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION metabib.browse_authority_refs_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT mbe.id + FROM metabib.browse_entry mbe + JOIN metabib.browse_entry_simple_heading_map mbeshm ON ( mbeshm.entry = mbe.id ) + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs_only map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY($1) + ) + WHERE mbe.sort_value >= public.naco_normalize($2) + ORDER BY mbe.sort_value, mbe.value LIMIT 1; +$p$ LANGUAGE SQL; + +-- The drop is necessary because the language change from PLPGSQL to SQL +-- carries with it name changes to the parameters +DROP FUNCTION metabib.browse_pivot(INT[], TEXT); +CREATE FUNCTION metabib.browse_pivot( + INT[], + TEXT +) RETURNS BIGINT AS $p$ + SELECT id FROM metabib.browse_entry + WHERE id IN ( + metabib.browse_bib_pivot($1, $2), + metabib.browse_authority_refs_pivot($1,$2) -- only look in 4xx, 5xx, 7xx of authority + ) + ORDER BY sort_value, value LIMIT 1; +$p$ LANGUAGE SQL; + +CREATE OR REPLACE FUNCTION metabib.staged_browse( + query TEXT, + fields INT[], + context_org INT, + context_locations INT[], + staff BOOL, + browse_superpage_size INT, + count_up_from_zero BOOL, -- if false, count down from -1 + result_limit INT, + next_pivot_pos INT +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + curs REFCURSOR; + rec RECORD; + qpfts_query TEXT; + aqpfts_query TEXT; + afields INT[]; + bfields INT[]; + result_row metabib.flat_browse_entry_appearance%ROWTYPE; + results_skipped INT := 0; + row_counter INT := 0; + row_number INT; + slice_start INT; + slice_end INT; + full_end INT; + all_records BIGINT[]; + all_brecords BIGINT[]; + all_arecords BIGINT[]; + superpage_of_records BIGINT[]; + superpage_size INT; +BEGIN + IF count_up_from_zero THEN + row_number := 0; + ELSE + row_number := -1; + END IF; + + OPEN curs FOR EXECUTE query; + + LOOP + FETCH curs INTO rec; + IF NOT FOUND THEN + IF result_row.pivot_point IS NOT NULL THEN + RETURN NEXT result_row; + END IF; + RETURN; + END IF; + + + -- Gather aggregate data based on the MBE row 
we're looking at now, authority axis + SELECT INTO all_arecords, result_row.sees, afields + ARRAY_AGG(DISTINCT abl.bib), -- bibs to check for visibility + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT aal.source), $$,$$), -- authority record ids + ARRAY_AGG(DISTINCT map.metabib_field) -- authority-tag-linked CMF rows + + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.authority_linking aal ON ( ash.record = aal.source ) + JOIN authority.bib_linking abl ON ( aal.target = abl.authority ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(fields) + ) + WHERE mbeshm.entry = rec.id; + + + -- Gather aggregate data based on the MBE row we're looking at now, bib axis + SELECT INTO all_brecords, result_row.authorities, bfields + ARRAY_AGG(DISTINCT source), + ARRAY_TO_STRING(ARRAY_AGG(DISTINCT authority), $$,$$), + ARRAY_AGG(DISTINCT def) + FROM metabib.browse_entry_def_map + WHERE entry = rec.id + AND def = ANY(fields); + + SELECT INTO result_row.fields ARRAY_TO_STRING(ARRAY_AGG(DISTINCT x), $$,$$) FROM UNNEST(afields || bfields) x; + + result_row.sources := 0; + result_row.asources := 0; + + -- Bib-linked vis checking + IF ARRAY_UPPER(all_brecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_brecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.sources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_brecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, until we've + -- either exhausted that set of records or found at least 1 + -- visible record. + + SELECT INTO result_row.sources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + -- Accurate? Well, probably. + result_row.accurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + -- Authority-linked vis checking + IF ARRAY_UPPER(all_arecords,1) IS NOT NULL THEN + + full_end := ARRAY_LENGTH(all_arecords, 1); + superpage_size := COALESCE(browse_superpage_size, full_end); + slice_start := 1; + slice_end := superpage_size; + + WHILE result_row.asources = 0 AND slice_start <= full_end LOOP + superpage_of_records := all_arecords[slice_start:slice_end]; + qpfts_query := + 'SELECT NULL::BIGINT AS id, ARRAY[r] AS records, ' || + '1::INT AS rel FROM (SELECT UNNEST(' || + quote_literal(superpage_of_records) || '::BIGINT[]) AS r) rr'; + + -- We use search.query_parser_fts() for visibility testing. + -- We're calling it once per browse-superpage worth of records + -- out of the set of records related to a given mbe, via + -- authority until we've either exhausted that set of records + -- or found at least 1 visible record. 
+ + SELECT INTO result_row.asources visible + FROM search.query_parser_fts( + context_org, NULL, qpfts_query, NULL, + context_locations, 0, NULL, NULL, FALSE, staff, FALSE + ) qpfts + WHERE qpfts.rel IS NULL; + + slice_start := slice_start + superpage_size; + slice_end := slice_end + superpage_size; + END LOOP; + + + -- Accurate? Well, probably. + result_row.aaccurate := browse_superpage_size IS NULL OR + browse_superpage_size >= full_end; + + END IF; + + IF result_row.sources > 0 OR result_row.asources > 0 THEN + + -- The function that calls this function needs row_number in order + -- to correctly order results from two different runs of this + -- functions. + result_row.row_number := row_number; + + -- Now, if row_counter is still less than limit, return a row. If + -- not, but it is less than next_pivot_pos, continue on without + -- returning actual result rows until we find + -- that next pivot, and return it. + + IF row_counter < result_limit THEN + result_row.browse_entry := rec.id; + result_row.value := rec.value; + + RETURN NEXT result_row; + ELSE + result_row.browse_entry := NULL; + result_row.authorities := NULL; + result_row.fields := NULL; + result_row.value := NULL; + result_row.sources := NULL; + result_row.sees := NULL; + result_row.accurate := NULL; + result_row.aaccurate := NULL; + result_row.pivot_point := rec.id; + + IF row_counter >= next_pivot_pos THEN + RETURN NEXT result_row; + RETURN; + END IF; + END IF; + + IF count_up_from_zero THEN + row_number := row_number + 1; + ELSE + row_number := row_number - 1; + END IF; + + -- row_counter is different from row_number. + -- It simply counts up from zero so that we know when + -- we've reached our limit. + row_counter := row_counter + 1; + END IF; + END LOOP; +END; +$p$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION metabib.browse( + search_field INT[], + browse_term TEXT, + context_org INT DEFAULT NULL, + context_loc_group INT DEFAULT NULL, + staff BOOL DEFAULT FALSE, + pivot_id BIGINT DEFAULT NULL, + result_limit INT DEFAULT 10 +) RETURNS SETOF metabib.flat_browse_entry_appearance AS $p$ +DECLARE + core_query TEXT; + back_query TEXT; + forward_query TEXT; + pivot_sort_value TEXT; + pivot_sort_fallback TEXT; + context_locations INT[]; + browse_superpage_size INT; + results_skipped INT := 0; + back_limit INT; + back_to_pivot INT; + forward_limit INT; + forward_to_pivot INT; +BEGIN + -- First, find the pivot if we were given a browse term but not a pivot. + IF pivot_id IS NULL THEN + pivot_id := metabib.browse_pivot(search_field, browse_term); + END IF; + + SELECT INTO pivot_sort_value, pivot_sort_fallback + sort_value, value FROM metabib.browse_entry WHERE id = pivot_id; + + -- Bail if we couldn't find a pivot. + IF pivot_sort_value IS NULL THEN + RETURN; + END IF; + + -- Transform the context_loc_group argument (if any) (logc at the + -- TPAC layer) into a form we'll be able to use. + IF context_loc_group IS NOT NULL THEN + SELECT INTO context_locations ARRAY_AGG(location) + FROM asset.copy_location_group_map + WHERE lgroup = context_loc_group; + END IF; + + -- Get the configured size of browse superpages. + SELECT INTO browse_superpage_size value -- NULL ok + FROM config.global_flag + WHERE enabled AND name = 'opac.browse.holdings_visibility_test_limit'; + + -- First we're going to search backward from the pivot, then we're going + -- to search forward. In each direction, we need two limits. At the + -- lesser of the two limits, we delineate the edge of the result set + -- we're going to return. 
At the greater of the two limits, we find the + -- pivot value that would represent an offset from the current pivot + -- at a distance of one "page" in either direction, where a "page" is a + -- result set of the size specified in the "result_limit" argument. + -- + -- The two limits in each direction make four derived values in total, + -- and we calculate them now. + back_limit := CEIL(result_limit::FLOAT / 2); + back_to_pivot := result_limit; + forward_limit := result_limit / 2; + forward_to_pivot := result_limit - 1; + + -- This is the meat of the SQL query that finds browse entries. We'll + -- pass this to a function which uses it with a cursor, so that individual + -- rows may be fetched in a loop until some condition is satisfied, without + -- waiting for a result set of fixed size to be collected all at once. + core_query := ' +SELECT mbe.id, + mbe.value, + mbe.sort_value + FROM metabib.browse_entry mbe + WHERE ( + EXISTS ( -- are there any bibs using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_def_map mbedm + WHERE mbedm.entry = mbe.id AND mbedm.def = ANY(' || quote_literal(search_field) || ') + LIMIT 1 + ) OR EXISTS ( -- are there any authorities using this mbe via the requested fields? + SELECT 1 + FROM metabib.browse_entry_simple_heading_map mbeshm + JOIN authority.simple_heading ash ON ( mbeshm.simple_heading = ash.id ) + JOIN authority.control_set_auth_field_metabib_field_map_refs map ON ( + ash.atag = map.authority_field + AND map.metabib_field = ANY(' || quote_literal(search_field) || ') + ) + WHERE mbeshm.entry = mbe.id + ) + ) AND '; + + -- This is the variant of the query for browsing backward. + back_query := core_query || + ' mbe.sort_value <= ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value DESC, mbe.value DESC '; + + -- This variant browses forward. + forward_query := core_query || + ' mbe.sort_value > ' || quote_literal(pivot_sort_value) || + ' ORDER BY mbe.sort_value, mbe.value '; + + -- We now call the function which applies a cursor to the provided + -- queries, stopping at the appropriate limits and also giving us + -- the next page's pivot. 
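+    -- As a worked example, with the default result_limit of 10 the
+    -- assignments above give back_limit = 5, back_to_pivot = 10,
+    -- forward_limit = 5 and forward_to_pivot = 9, so each staged_browse()
+    -- call below returns at most half a page of entries and, once it has
+    -- skipped far enough, the pivot row for the neighboring page.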
+ RETURN QUERY + SELECT * FROM metabib.staged_browse( + back_query, search_field, context_org, context_locations, + staff, browse_superpage_size, TRUE, back_limit, back_to_pivot + ) UNION + SELECT * FROM metabib.staged_browse( + forward_query, search_field, context_org, context_locations, + staff, browse_superpage_size, FALSE, forward_limit, forward_to_pivot + ) ORDER BY row_number DESC; + +END; +$p$ LANGUAGE PLPGSQL; + +-- No 4XX inter-authority linking +UPDATE authority.control_set_authority_field SET linking_subfield = NULL; +UPDATE authority.control_set_authority_field SET linking_subfield = '0' WHERE tag LIKE ANY (ARRAY['5%','7%']); + +-- Map between authority controlled bib fields and stock indexing metabib fields +INSERT INTO authority.control_set_bib_field_metabib_field_map (bib_field, metabib_field) + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '100' AND m.name = 'personal' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '110' AND m.name = 'corporate' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '111' AND m.name = 'conference' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '130' AND m.name = 'uniform' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '148' AND m.name = 'temporal' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '150' AND m.name = 'topic' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '151' AND m.name = 'geographic' + + UNION + + SELECT DISTINCT b.id AS bib_field, m.id AS metabib_field + FROM authority.control_set_bib_field b JOIN authority.control_set_authority_field a ON (b.authority_field = a.id), config.metabib_field m + WHERE a.tag = '155' AND m.name = 'genre' -- Just in case... +; + +CREATE OR REPLACE FUNCTION authority.indexing_ingest_or_delete () RETURNS TRIGGER AS $func$ +DECLARE + ashs authority.simple_heading%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + ash_id BIGINT; +BEGIN + + IF NEW.deleted IS TRUE THEN -- If this authority is deleted + DELETE FROM authority.bib_linking WHERE authority = NEW.id; -- Avoid updating fields in bibs that are no longer visible + DELETE FROM authority.full_rec WHERE record = NEW.id; -- Avoid validating fields against deleted authority records + DELETE FROM authority.simple_heading WHERE record = NEW.id; + -- Should remove matching $0 from controlled fields at the same time? 
+ + -- XXX What do we about the actual linking subfields present in + -- authority records that target this one when this happens? + DELETE FROM authority.authority_linking + WHERE source = NEW.id OR target = NEW.id; + + RETURN NEW; -- and we're done + END IF; + + IF TG_OP = 'UPDATE' THEN -- re-ingest? + PERFORM * FROM config.internal_flag WHERE name = 'ingest.reingest.force_on_same_marc' AND enabled; + + IF NOT FOUND AND OLD.marc = NEW.marc THEN -- don't do anything if the MARC didn't change + RETURN NEW; + END IF; + + -- Propagate these updates to any linked bib records + PERFORM authority.propagate_changes(NEW.id) FROM authority.record_entry WHERE id = NEW.id; + + DELETE FROM authority.simple_heading WHERE record = NEW.id; + DELETE FROM authority.authority_linking WHERE source = NEW.id; + END IF; + + INSERT INTO authority.authority_linking (source, target, field) + SELECT source, target, field FROM authority.calculate_authority_linking( + NEW.id, NEW.control_set, NEW.marc::XML + ); + + FOR ashs IN SELECT * FROM authority.simple_heading_set(NEW.marc) LOOP + + INSERT INTO authority.simple_heading (record,atag,value,sort_value) + VALUES (ashs.record, ashs.atag, ashs.value, ashs.sort_value); + ash_id := CURRVAL('authority.simple_heading_id_seq'::REGCLASS); + + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = ashs.value AND sort_value = ashs.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( ashs.value, ashs.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_simple_heading_map (entry,simple_heading) VALUES (mbe_id,ash_id); + + END LOOP; + + -- Flatten and insert the afr data + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_full_rec' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_full_rec(NEW.id); + PERFORM * FROM config.internal_flag WHERE name = 'ingest.disable_authority_rec_descriptor' AND enabled; + IF NOT FOUND THEN + PERFORM authority.reingest_authority_rec_descriptor(NEW.id); + END IF; + END IF; + + RETURN NEW; +END; +$func$ LANGUAGE PLPGSQL; + +COMMIT; + diff --git a/KCLS/sql/kmain221/0839.data.alternative-title-indexing.sql b/KCLS/sql/kmain221/0839.data.alternative-title-indexing.sql new file mode 100644 index 0000000000..5cc4061b0d --- /dev/null +++ b/KCLS/sql/kmain221/0839.data.alternative-title-indexing.sql @@ -0,0 +1,100 @@ +BEGIN; + +UPDATE config.metabib_field +SET + xpath = $$//mods32:mods/mods32:titleInfo[mods32:title and starts-with(@type,'alternative')]$$, + browse_sort_xpath = $$*[local-name() != "nonSort"]$$, + browse_xpath = NULL +WHERE + field_class = 'title' AND name = 'alternative' AND format = 'mods32'; + +COMMIT; + +-- The following function only appears in the upgrade script and not the +-- baseline schema because it's not necessary in the latter (and it's a +-- temporary function). It just serves to do a hopefully cheaper, more +-- focused reingest just to hit the alternative title index. + +-- This cribs from the guts of metabib.reingest_metabib_field_entries(), +-- and if it actually is a timesaver over a full reingest, then at some +-- point in the future it would be nice if we broke it out into a separate +-- function to make things like this easier. 
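+-- For comparison (a sketch only, not part of this upgrade script), the
+-- heavier route this sidesteps would be a browse-only pass of the stock
+-- reingest function over all live bibs, along the lines of:
+--
+--   SELECT metabib.reingest_metabib_field_entries(id, TRUE, FALSE, TRUE)
+--     FROM biblio.record_entry WHERE NOT deleted;
+--
+-- (skip_facet and skip_search TRUE, skip_browse FALSE), which walks every
+-- browse field rather than just the alternative title index.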
+ +CREATE OR REPLACE FUNCTION pg_temp.alternative_title_reingest( bib_id BIGINT ) RETURNS VOID AS $func$ +DECLARE + ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL := false; + b_skip_browse BOOL := false; + b_skip_search BOOL := false; + alt_title INT; + value_prepped TEXT; +BEGIN + SELECT INTO alt_title id FROM config.metabib_field WHERE field_class = 'title' AND name = 'alternative'; + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) WHERE field = alt_title LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + -- Avoid inserting duplicate rows, but retain granularity of being + -- able to search browse fields with "starts with" type operators + -- (for example, for titles of songs in music albums) + IF (ind_data.search_field OR ind_data.browse_field) AND NOT b_skip_search THEN + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +--SELECT pg_temp.alternative_title_reingest(id) FROM biblio.record_entry WHERE NOT deleted; diff --git a/KCLS/sql/kmain221/0844.data.better_mods_for_browse_etc.sql b/KCLS/sql/kmain221/0844.data.better_mods_for_browse_etc.sql new file mode 100644 index 0000000000..e3a018210f --- /dev/null +++ b/KCLS/sql/kmain221/0844.data.better_mods_for_browse_etc.sql @@ -0,0 +1,3620 @@ +BEGIN; + +-- 953.data.MODS32-xsl.sql +UPDATE config.xml_transform SET xslt=$$ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BK + SE + + + BK + MM + CF + MP + VM + MU + + + + + + + + + b + afgk + + + + + abfgk + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + b + b + afgk + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">abfgk</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + abfgk + + + + + + + + + + + <xsl:value-of select="substring($titleBrowseChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleBrowseChop"/> + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <xsl:with-param name="codes">a</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + a + + + + + + + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind2+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + + + + <xsl:call-template name="chopPunctuation"> + <xsl:with-param name="chopString"> + <xsl:call-template name="subfieldSelect"> + <!-- 1/04 removed $h, $b --> + <xsl:with-param name="codes">af</xsl:with-param> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,$nfi+1)"/> + + + + + <xsl:value-of select="$titleChop"/> + + + + + + + + + + + + ah + + + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + <xsl:value-of select="substring($titleChop,@ind1+1)"/> + + + + + <xsl:value-of select="$titleChop" /> + + + + + + + + + + + + + creator + + + + + + + + + + creator + + + + + + + + + + creator + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + personal + + + + + + + + + + + yes + + + yes + + + text + cartographic + notated music + sound recording-nonmusical + sound recording-musical + still image + moving image + three dimensional object + software, multimedia + mixed material + + + + globe + + + remote sensing image + + + + + + 
+<!-- MODS 3.2 (mods32) XSLT stylesheet body from 953.data.MODS32-xsl.sql, updated to emit the titleBrowse elements used by the new browse indexing; full stylesheet markup omitted. -->
$$ WHERE name = 'mods32'; + +CREATE OR REPLACE FUNCTION biblio.extract_metabib_field_entry ( rid BIGINT, default_joiner TEXT ) RETURNS SETOF metabib.field_entry_template AS $func$ +DECLARE + bib biblio.record_entry%ROWTYPE; + idx config.metabib_field%ROWTYPE; + xfrm config.xml_transform%ROWTYPE; + prev_xfrm TEXT; + transformed_xml TEXT; + xml_node TEXT; + xml_node_list TEXT[]; + facet_text TEXT; + browse_text TEXT; + sort_value TEXT; + raw_text TEXT; + curr_text TEXT; + joiner TEXT := default_joiner; -- XXX will index defs supply a joiner? + authority_text TEXT; + authority_link BIGINT; + output_row metabib.field_entry_template%ROWTYPE; +BEGIN + + -- Start out with no field-use bools set + output_row.browse_field = FALSE; + output_row.facet_field = FALSE; + output_row.search_field = FALSE; + + -- Get the record + SELECT INTO bib * FROM biblio.record_entry WHERE id = rid; + + -- Loop over the indexing entries + FOR idx IN SELECT * FROM config.metabib_field ORDER BY format LOOP + + joiner := COALESCE(idx.joiner, default_joiner); + + SELECT INTO xfrm * from config.xml_transform WHERE name = idx.format; + + -- See if we can skip the XSLT ... it's expensive + IF prev_xfrm IS NULL OR prev_xfrm <> xfrm.name THEN + -- Can't skip the transform + IF xfrm.xslt <> '---' THEN + transformed_xml := oils_xslt_process(bib.marc,xfrm.xslt); + ELSE + transformed_xml := bib.marc; + END IF; + + prev_xfrm := xfrm.name; + END IF; + + xml_node_list := perl_oils_xpath( idx.xpath, transformed_xml, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + + raw_text := NULL; + FOR xml_node IN SELECT x FROM unnest(xml_node_list) AS x LOOP + CONTINUE WHEN xml_node !~ E'^\\s*<'; + + -- XXX much of this should be moved into oils_xpath_string... + curr_text := ARRAY_TO_STRING(evergreen.array_remove_item_by_value(evergreen.array_remove_item_by_value( + oils_xpath( '//text()', + REGEXP_REPLACE( + REGEXP_REPLACE( -- This escapes all &s not followed by "amp;". 
Data is returned from oils_xpath (above) in UTF-8, not entity encoded + REGEXP_REPLACE( -- This escapes embedded <s + xml_node, + $re$(>[^<]+)(<)([^>]+<)$re$, + E'\\1&lt;\\3', + 'g' + ), + '&(?!amp;)', + '&amp;', + 'g' + ), + E'\\s+', + ' ', + 'g' + ) + ), ' '), ''), + joiner + ); + + CONTINUE WHEN curr_text IS NULL OR curr_text = ''; + + IF raw_text IS NOT NULL THEN + raw_text := raw_text || joiner; + END IF; + + raw_text := COALESCE(raw_text,'') || curr_text; + + -- autosuggest/metabib.browse_entry + IF idx.browse_field THEN + + IF idx.browse_xpath IS NOT NULL AND idx.browse_xpath <> '' THEN + browse_text := oils_xpath_string( idx.browse_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + browse_text := curr_text; + END IF; + + IF idx.browse_sort_xpath IS NOT NULL AND + idx.browse_sort_xpath <> '' THEN + + sort_value := oils_xpath_string( + idx.browse_sort_xpath, xml_node, joiner, + ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] + ); + ELSE + sort_value := browse_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(browse_text, E'\\s+', ' ', 'g')); + output_row.sort_value := + public.naco_normalize(sort_value); + + output_row.authority := NULL; + + IF idx.authority_xpath IS NOT NULL AND idx.authority_xpath <> '' THEN + authority_text := oils_xpath_string( + idx.authority_xpath, xml_node, joiner, + ARRAY[ + ARRAY[xfrm.prefix, xfrm.namespace_uri], + ARRAY['xlink','http://www.w3.org/1999/xlink'] + ] + ); + + IF authority_text ~ '^\d+$' THEN + authority_link := authority_text::BIGINT; + PERFORM * FROM authority.record_entry WHERE id = authority_link; + IF FOUND THEN + output_row.authority := authority_link; + END IF; + END IF; + + END IF; + + output_row.browse_field = TRUE; + -- Returning browse rows with search_field = true for search+browse + -- configs allows us to retain granularity of being able to search + -- browse fields with "starts with" type operators (for example, for + -- titles of songs in music albums) + IF idx.search_field THEN + output_row.search_field = TRUE; + END IF; + RETURN NEXT output_row; + output_row.browse_field = FALSE; + output_row.search_field = FALSE; + output_row.sort_value := NULL; + END IF; + + -- insert raw node text for faceting + IF idx.facet_field THEN + + IF idx.facet_xpath IS NOT NULL AND idx.facet_xpath <> '' THEN + facet_text := oils_xpath_string( idx.facet_xpath, xml_node, joiner, ARRAY[ARRAY[xfrm.prefix, xfrm.namespace_uri]] ); + ELSE + facet_text := curr_text; + END IF; + + output_row.field_class = idx.field_class; + output_row.field = -1 * idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(facet_text, E'\\s+', ' ', 'g')); + + output_row.facet_field = TRUE; + RETURN NEXT output_row; + output_row.facet_field = FALSE; + END IF; + + END LOOP; + + CONTINUE WHEN raw_text IS NULL OR raw_text = ''; + + -- insert combined node text for searching + IF idx.search_field THEN + output_row.field_class = idx.field_class; + output_row.field = idx.id; + output_row.source = rid; + output_row.value = BTRIM(REGEXP_REPLACE(raw_text, E'\\s+', ' ', 'g')); + + output_row.search_field = TRUE; + RETURN NEXT output_row; + output_row.search_field = FALSE; + END IF; + + END LOOP; + +END; + +$func$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries( bib_id BIGINT, skip_facet BOOL DEFAULT FALSE, skip_browse BOOL DEFAULT FALSE, skip_search BOOL DEFAULT FALSE ) RETURNS VOID AS $func$ +DECLARE + fclass RECORD; + 
ind_data metabib.field_entry_template%ROWTYPE; + mbe_row metabib.browse_entry%ROWTYPE; + mbe_id BIGINT; + b_skip_facet BOOL; + b_skip_browse BOOL; + b_skip_search BOOL; + value_prepped TEXT; +BEGIN + + SELECT COALESCE(NULLIF(skip_facet, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_facet_indexing' AND enabled)) INTO b_skip_facet; + SELECT COALESCE(NULLIF(skip_browse, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_browse_indexing' AND enabled)) INTO b_skip_browse; + SELECT COALESCE(NULLIF(skip_search, FALSE), EXISTS (SELECT enabled FROM config.internal_flag WHERE name = 'ingest.skip_search_indexing' AND enabled)) INTO b_skip_search; + + PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled; + IF NOT FOUND THEN + IF NOT b_skip_search THEN + FOR fclass IN SELECT * FROM config.metabib_class LOOP + -- RAISE NOTICE 'Emptying out %', fclass.name; + EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id; + END LOOP; + END IF; + IF NOT b_skip_facet THEN + DELETE FROM metabib.facet_entry WHERE source = bib_id; + END IF; + IF NOT b_skip_browse THEN + DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id; + END IF; + END IF; + + FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP + IF ind_data.field < 0 THEN + ind_data.field = -1 * ind_data.field; + END IF; + + IF ind_data.facet_field AND NOT b_skip_facet THEN + INSERT INTO metabib.facet_entry (field, source, value) + VALUES (ind_data.field, ind_data.source, ind_data.value); + END IF; + + IF ind_data.browse_field AND NOT b_skip_browse THEN + -- A caveat about this SELECT: this should take care of replacing + -- old mbe rows when data changes, but not if normalization (by + -- which I mean specifically the output of + -- evergreen.oils_tsearch2()) changes. It may or may not be + -- expensive to add a comparison of index_vector to index_vector + -- to the WHERE clause below. + + value_prepped := metabib.browse_normalize(ind_data.value, ind_data.field); + SELECT INTO mbe_row * FROM metabib.browse_entry + WHERE value = value_prepped AND sort_value = ind_data.sort_value; + + IF FOUND THEN + mbe_id := mbe_row.id; + ELSE + INSERT INTO metabib.browse_entry + ( value, sort_value ) VALUES + ( value_prepped, ind_data.sort_value ); + + mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS); + END IF; + + INSERT INTO metabib.browse_entry_def_map (entry, def, source, authority) + VALUES (mbe_id, ind_data.field, ind_data.source, ind_data.authority); + END IF; + + IF ind_data.search_field AND NOT b_skip_search THEN + -- Avoid inserting duplicate rows + EXECUTE 'SELECT 1 FROM metabib.' 
|| ind_data.field_class || + '_field_entry WHERE field = $1 AND source = $2 AND value = $3' + INTO mbe_id USING ind_data.field, ind_data.source, ind_data.value; + -- RAISE NOTICE 'Search for an already matching row returned %', mbe_id; + IF mbe_id IS NULL THEN + EXECUTE $$ + INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value) + VALUES ($$ || + quote_literal(ind_data.field) || $$, $$ || + quote_literal(ind_data.source) || $$, $$ || + quote_literal(ind_data.value) || + $$);$$; + END IF; + END IF; + + END LOOP; + + IF NOT b_skip_search THEN + PERFORM metabib.update_combined_index_vectors(bib_id); + END IF; + + RETURN; +END; +$func$ LANGUAGE PLPGSQL; + +-- Don't use Title Proper search field as the browse field +UPDATE config.metabib_field SET browse_field = FALSE, browse_xpath = NULL, browse_sort_xpath = NULL WHERE id = 6; + +-- Create a new Title Proper browse config +INSERT INTO config.metabib_field ( id, field_class, name, label, format, xpath, search_field, authority_xpath, browse_field, browse_sort_xpath ) VALUES + (31, 'title', 'browse', oils_i18n_gettext(31, 'Title Proper (Browse)', 'cmf', 'label'), 'mods32', $$//mods32:mods/mods32:titleBrowse$$, FALSE, '//@xlink:href', TRUE, $$*[local-name() != "nonSort"]$$ ); + +COMMIT; + +-- \qecho This is a browse-only reingest of your bib records. It may take a while. +-- \qecho You may cancel now without losing the effect of the rest of the +-- \qecho upgrade script, and arrange the reingest later. +-- \qecho . +-- SELECT metabib.reingest_metabib_field_entries(id, TRUE, FALSE, TRUE) +-- FROM biblio.record_entry; diff --git a/KCLS/sql/kmain221/0845.schema.browse_pivots_stable.sql b/KCLS/sql/kmain221/0845.schema.browse_pivots_stable.sql new file mode 100644 index 0000000000..b4ab854af0 --- /dev/null +++ b/KCLS/sql/kmain221/0845.schema.browse_pivots_stable.sql @@ -0,0 +1,8 @@ +BEGIN; + +ALTER FUNCTION metabib.browse_pivot (integer[], text) STABLE; +ALTER FUNCTION metabib.browse_bib_pivot (integer[], text) STABLE; +ALTER FUNCTION metabib.browse_authority_pivot (integer[], text) STABLE; +ALTER FUNCTION metabib.browse_authority_refs_pivot (integer[], text) STABLE; + +COMMIT; \ No newline at end of file diff --git a/KCLS/sql/kmain221/0846.function.vand-add_field.sql b/KCLS/sql/kmain221/0846.function.vand-add_field.sql new file mode 100644 index 0000000000..f1cd1fa61f --- /dev/null +++ b/KCLS/sql/kmain221/0846.function.vand-add_field.sql @@ -0,0 +1,81 @@ +BEGIN; + +CREATE OR REPLACE FUNCTION vandelay.add_field ( target_xml TEXT, source_xml TEXT, field TEXT, force_add INT ) RETURNS TEXT AS $_$ + + use MARC::Record; + use MARC::File::XML (BinaryEncoding => 'UTF-8'); + use MARC::Charset; + use strict; + + MARC::Charset->assume_unicode(1); + + my $target_xml = shift; + my $source_xml = shift; + my $field_spec = shift; + my $force_add = shift || 0; + + my $target_r = MARC::Record->new_from_xml( $target_xml ); + my $source_r = MARC::Record->new_from_xml( $source_xml ); + + return $target_xml unless ($target_r && $source_r); + + my @field_list = split(',', $field_spec); + + my %fields; + for my $f (@field_list) { + $f =~ s/^\s*//; $f =~ s/\s*$//; + if ($f =~ /^(.{3})(\w*)(?:\[([^]]*)\])?$/) { + my $field = $1; + $field =~ s/\s+//; + my $sf = $2; + $sf =~ s/\s+//; + my $match = $3; + $match =~ s/^\s*//; $match =~ s/\s*$//; + $fields{$field} = { sf => [ split('', $sf) ] }; + if ($match) { + my ($msf,$mre) = split('~', $match); + if (length($msf) > 0 and length($mre) > 0) { + $msf =~ s/^\s*//; $msf =~ s/\s*$//; + $mre =~ 
s/^\s*//; $mre =~ s/\s*$//; + $fields{$field}{match} = { sf => $msf, re => qr/$mre/ }; + } + } + } + } + + for my $f ( keys %fields) { + if ( @{$fields{$f}{sf}} ) { + for my $from_field ($source_r->field( $f )) { + my @tos = $target_r->field( $f ); + if (!@tos) { + next if (exists($fields{$f}{match}) and !$force_add); + my @new_fields = map { $_->clone } $source_r->field( $f ); + $target_r->insert_fields_ordered( @new_fields ); + } else { + for my $to_field (@tos) { + if (exists($fields{$f}{match})) { + next unless (grep { $_ =~ $fields{$f}{match}{re} } $to_field->subfield($fields{$f}{match}{sf})); + } + for my $old_sf ($from_field->subfields) { + $to_field->add_subfields( @$old_sf ) if grep(/$$old_sf[0]/,@{$fields{$f}{sf}}); + } + } + } + } + } else { + my @new_fields = map { $_->clone } $source_r->field( $f ); + $target_r->insert_fields_ordered( @new_fields ); + } + } + + $target_xml = $target_r->as_xml_record; + $target_xml =~ s/^<\?.+?\?>$//mo; + $target_xml =~ s/\n//sgo; + $target_xml =~ s/>\s+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -4135,6 +4165,89 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + register_method( + method => "browseSetNav", + api_name => "open-ils.search.metabib.browse.setnav" +); + +#--------------------------------------------------------------------- +# This osrf call is to allow the navigation links on the browse_items +# page to be populated with the next and previous record. +#--------------------------------------------------------------------- +#["10248","id|bibcn","YR973.003%20CON","1"] +sub browseSetNav { + my $self = shift; + my $client = shift; + my $browseEntry = shift; + my $searchClass = shift; + my $searchTerm = shift; + my $locg = shift; + + my $e = new_editor; + + #Hardcoding to true becaue at time of creation browse search will only be available through the staff client + my $isStaffClient = 't'; + + my $results = $e->json_query({ + from => [ "metabib.browse", $searchClass, $searchTerm, $locg, undef, $isStaffClient, $browseEntry, '3' ] + }); + + my $navResults = {}; +$logger->debug(Dumper($results)); + foreach (@$results) { + + my $current = $_; + + switch ($current->{row_number}) { + + case -1 { +$logger->debug("next: " . Dumper($current)); + $navResults->{next_browse} = $current->{browse_entry}; + $navResults->{next_field} = $current->{fields}; + } + + case 0 { +$logger->debug("current: " . Dumper($current)); + $navResults->{current_value} = $current->{value}; + } + + case 1 { +$logger->debug("previous: " . 
Dumper($current)); + $navResults->{previous_browse} = $current->{browse_entry}; + $navResults->{previous_field} = $current->{fields}; + } + } + } + + return $navResults; +} # ------------------------------------------------------------------ # Create custom dictionaries like so: diff --git a/Open-ILS/src/perlmods/lib/OpenILS/Application/Search/Biblio.pm b/Open-ILS/src/perlmods/lib/OpenILS/Application/Search/Biblio.pm index e5eab8f64c..15f2f9c61a 100644 --- a/Open-ILS/src/perlmods/lib/OpenILS/Application/Search/Biblio.pm +++ b/Open-ILS/src/perlmods/lib/OpenILS/Application/Search/Biblio.pm @@ -880,8 +880,8 @@ sub kcls_scrub_multiclass_query { my $actual_query; # first separate the search query from the modifiers - my @modifiers = - ("mattype","item_lang","audience_group","after","sort","site","depth"); + my @modifiers = qw/mattype item_lang + audience_group after sort has_browse_entry site depth/; for my $i (0 .. $#modifiers) { @@ -889,9 +889,11 @@ $actual_query = $1; # Then pull off any extraneous parens - if ($actual_query =~ m/.*[\(\)].*/){ - $actual_query =~ s/[\(\)]//g; - $query =~ s/(.*)( $modifiers[$i]\()/$actual_query$2/g; + if ($actual_query !~ m/has_browse_entry/){ + if ($actual_query =~ m/.*[\(\)].*/) { + $actual_query =~ s/[\(\)]//g; + $query =~ s/(.*)( $modifiers[$i]\()/$actual_query$2/g; + } last; # Kill the loop diff --git a/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Driver/Pg/QueryParser.pm b/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Driver/Pg/QueryParser.pm index 1f5edacf41..9b6063e6ef 100644 --- a/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Driver/Pg/QueryParser.pm +++ b/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Driver/Pg/QueryParser.pm @@ -1,5 +1,6 @@ use strict; use warnings; +use Switch; package OpenILS::Application::Storage::Driver::Pg::QueryParser; use OpenILS::Application::Storage::QueryParser; @@ -1681,11 +1682,28 @@ sub flatten { "search.calculate_visibility_attribute_test('status','{".join(',', @{$filter->args})."}',$negate)"; } + # TODO: is this comment still relevant? + # Case for displaying browse results + # Needs sub-cases for title, author, subject, series, call number(id:bibcn) + } elsif ($filter->name eq 'has_browse_entry') { - if (@{$filter->args} >= 2) { + if (@{$filter->args} >= 3) { + my $browseType = shift @{$filter->args}; my $entry = int(shift @{$filter->args}); my $fields = join(",", map(int, @{$filter->args})); - $from .= "\n" . $spc x 3 . sprintf("INNER JOIN metabib.browse_entry_def_map mbedm ON (mbedm.source = m.source AND mbedm.entry = %d AND mbedm.def IN (%s))", $entry, $fields); + my $defMap; + #Set the correct table name based on the browse search type + switch ($browseType) { + case "title" { $defMap = "browse_title_entry_def_map" } + case "author" { $defMap = "browse_author_entry_def_map" } + case "subject" { $defMap = "browse_subject_entry_def_map" } + case "series" { $defMap = "browse_series_entry_def_map" } + # XXX I don't know where or why 'identifier#' is being inserted, but it seems harmless + case "identifier#id|bibcn" { $defMap = "browse_call_number_entry_def_map" } + # Default to title + else { $defMap = "browse_title_entry_def_map" } + } + $from .= "\n" . $spc x 3 . 
sprintf("INNER JOIN metabib.$defMap mbedm ON (mbedm.source = m.source AND mbedm.entry = %d AND mbedm.def IN (%s))", $entry, $fields); } } elsif ($filter->name eq 'edit_date' or $filter->name eq 'create_date') { # bre.create_date and bre.edit_date filtering diff --git a/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Publisher/authority.pm b/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Publisher/authority.pm index 893fa2d8f9..358adb7ef6 100644 --- a/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Publisher/authority.pm +++ b/Open-ILS/src/perlmods/lib/OpenILS/Application/Storage/Publisher/authority.pm @@ -44,7 +44,7 @@ sub validate_tag { push @selects, "SELECT record FROM $search_table ". - "WHERE tag = ? AND subfield = ? AND value = ?"; + "WHERE tag = ? AND subfield = ? AND sort_value = public.naco_normalize(?)"; } my $sql; diff --git a/Open-ILS/src/perlmods/lib/OpenILS/Application/Vandelay.pm b/Open-ILS/src/perlmods/lib/OpenILS/Application/Vandelay.pm index 9f11bd84da..6e92d27e75 100644 --- a/Open-ILS/src/perlmods/lib/OpenILS/Application/Vandelay.pm +++ b/Open-ILS/src/perlmods/lib/OpenILS/Application/Vandelay.pm @@ -2457,4 +2457,25 @@ sub bib_queue_to_bucket { }; } +__PACKAGE__->register_method( + api_name => 'open-ils.vandelay.retrieve_matched_records', + method => 'matched_record', +); + +sub matched_record { + my ($self, $conn, $auth, $queued_bib_record, $type) = @_; + my $e = new_editor(authtoken=>$auth); + return $e->die_event unless $e->checkauth; + + my @rec_id_list = $e->json_query({ + from => [ "vandelay.get_matched_records", $queued_bib_record, $type] + }); + + for my $rec (@rec_id_list) { + $conn->respond($rec); + } + + return undef; +} + 1; diff --git a/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader.pm b/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader.pm index a06906eb85..63c1fac7b3 100644 --- a/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader.pm +++ b/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader.pm @@ -145,10 +145,12 @@ sub load { return $self->load_library if $path =~ m|opac/library|; return $self->load_rresults if $path =~ m|opac/results|; + return $self->load_rresults if $path =~ m|opac/browse_items|; return $self->load_print_record if $path =~ m|opac/record/print|; return $self->load_record if $path =~ m|opac/record/\d|; return $self->load_cnbrowse if $path =~ m|opac/cnbrowse|; return $self->load_browse if $path =~ m|opac/browse|; + return $self->load_rresults if $path =~ m|opac/see_also|; return $self->load_mylist_add if $path =~ m|opac/mylist/add|; return $self->load_mylist_delete if $path =~ m|opac/mylist/delete|; diff --git a/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Browse.pm b/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Browse.pm index 8f537c51a5..c9e740b985 100644 --- a/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Browse.pm +++ b/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Browse.pm @@ -208,7 +208,7 @@ sub map_authority_headings_to_results { ] } } - }) or return; + }, {timeout => 600}) or return; my %auth_counts = map { $_->{authority} => $_->{count} } @$counts; @@ -282,9 +282,10 @@ sub flesh_browse_results { sub load_browse_impl { my ($self, @params) = @_; - my $results = $self->editor->json_query({ - from => [ "metabib.browse", @params ] - }); + my $results = $self->editor->json_query( + {from => [ "metabib.browse", @params ]}, + {timeout => 600} + ); if (not $results) { # DB error, not empty result set. 
$logger->warn( @@ -414,4 +415,330 @@ sub load_browse { return Apache2::Const::OK; } +# Loops through the results and determines if the browse entry is authoritative or not +# then passes that entry to the appropriate set_heading function. +sub gather_display_headings { + my ($self, $results) = @_; + + for my $browse_term (@$results) { + $browse_term->{ref_headings} = {}; + my $auth_id = $self->is_not_authoritative($browse_term->{browse_entry}); + if($auth_id) { + $self->set_see_heading($browse_term, $auth_id); + } else { + $self->set_5xx_heading($browse_term); + } + } +} + +# Processes headings for authorized browse terms. Takes a browse term from results. +# and determine what reference heading it should display. +sub set_5xx_heading { + my ($self, $browse_term) = @_; + + my $browse_term_marc = $self->get_browse_entry_marc($browse_term->{browse_entry}); + my %see_also_hash; + my $browse_term_id; + if($browse_term_marc) { + # Grab the browse terms authority id incase we use it later. + $browse_term_id = $browse_term_marc->field('901')->subfield('c'); + # Grab any Notes the browse term might have on its 680 field + $self->extract_public_general_notes($browse_term_marc, $browse_term); + my @see_also_fields = $browse_term_marc->field('5..'); + + # Loop over fields and extract the 0 subfield, which is the id of the authority linked too + # Grab the w, i and 4 subfields and stash that all in a hash keyed by the id + for my $field (@see_also_fields) { + my ($zero_subfield) = $field->subfield('0') =~ /(\d+)/; + my $w_subfield = $field->subfield('w'); + my $i_subfield = $field->subfield('i'); + my $four_subfield = $field->subfield('4'); + # If there is no w subfield just set it too all n's so we treat it like a standard see also + if(!$w_subfield) { + $w_subfield = 'nnnn'; + } + # Check for use of 663 and 665 fields + + + if($zero_subfield) { + # If we get here we know this reference links so check for 663 and 665 and populate them. + if($w_subfield =~ /...c/) { + $browse_term->{complex_see_also} = $browse_term_marc->field('663')->subfield('a'); + } + if($w_subfield =~ /...d/) { + # If we need the 665, grab all subfield a's and then concat them into an array + my @history_reference = $browse_term_marc->field('665')->subfield('a'); + my $display_string; + for my $part (@history_reference) { + $display_string = $display_string . ' ' . $part; + } + $browse_term->{history_reference} = $display_string; + } + $see_also_hash{$zero_subfield}{w} = $w_subfield; + $see_also_hash{$zero_subfield}{i} = $i_subfield; + $see_also_hash{$zero_subfield}{4} = $four_subfield; + } + } + } + + # Ugly loop but each loop should only be 1 or 2 iterations. It is necessary because of the structure of the data + for my $authority_field_name ( qw/authorities sees/ ) { + # Loop over the see list and authorities list + for my $references (@{$browse_term->{$authority_field_name}}) { + # Loop over the headings and dive down into the data structure + for my $heading (@{$references->{headings}}) { + for my $value (values %$heading) { + for my $entry (@$value) { + my $w_subfield = $see_also_hash{$entry->{target}}{w}; + my @params; + + # Check if w subfield exists and that it is not a g. Since we set w subfield to all n's if the link existed but did not have a w subfield we can + # assume if w subfield is null then the link does not exist in the browse authority record. If the subfield is a g we don't want to show g references + # that are on the browse entries marc. 
+ if($w_subfield and $w_subfield !~ /g/) { + push @params, $w_subfield; + } elsif ($self->is_g_reference($entry, $browse_term_id)) { + # This reference is a g type from the entries marc record that links to the browse entry, so we want to show these g references. + push @params, 'gnnn'; + } + + if(@params) { + # If we have params then we have a link we want to show, so go ahead and fetch the heading. + my $raw_ref_heading = $self->editor->json_query({ + from => [ "authority.get_5xx_heading", @params ] + }); + my $ref_heading; + # Check for w subfield value r. If it is an r we need to get the i or 4 subield data and add that to the heading. + if($w_subfield =~ /^r/) { + if($see_also_hash{$entry->{target}}{i}) { + $ref_heading = @$raw_ref_heading[0]->{"authority.get_5xx_heading"} . " " . $see_also_hash{$entry->{target}}{i}; + } else { + $ref_heading = @$raw_ref_heading[0]->{"authority.get_5xx_heading"} . " " . $see_also_hash{$entry->{target}}{4}; + } + } + elsif ($w_subfield =~ /nnnc/) { + # Don't add "See Also" before headings with 663 explanation + $ref_heading = ''; + } + else { + $ref_heading = @$raw_ref_heading[0]->{"authority.get_5xx_heading"}; + } + # Set the heading and show flag so the template can show it. + $browse_term->{ref_headings}->{$entry->{target}}->{display} = $ref_heading; + $browse_term->{ref_headings}->{$entry->{target}}->{show} = 1; + # Grab the marc for this entry so we can fetch any notes + my $record = $self->get_marc_by_id($entry); + if($record) { + $self->extract_public_general_notes($record, $browse_term->{ref_headings}->{$entry->{target}}); + } + + } else { + # If we don't have any params then the entry does not appear on the browse marc or is not a g type so we must have grabbed something + # permissively. So don't show it. + $browse_term->{ref_headings}->{$entry->{target}}->{show} = 0; + } + } + } + } + } + } +} + +# Fetches a marc file by auth id +sub get_marc_by_id { + my ($self, $entry) = @_; + + # Get the entries marc + my $raw_entry_marc = $self->editor->json_query({ + select => { + are => ["marc"] + }, + from => { + are => { } + }, + where => {"+are" => {id => $entry->{target}}} + }); + + # Convert the marc from XML to a MARC::Record Object + my $record; + if($raw_entry_marc) { + eval { + $record = new_from_xml MARC::Record(@$raw_entry_marc[0]->{"marc"}); + }; + if ($@) { + $logger->warn("Error reading marc record"); + return undef; + } + } + return $record; +} + +# Takes entry and an authority id. Looks for the authority id in the marc of the browse entry +# and if found checks if the w subfield on the reference is a g. Returns 1 if this is the case, +# else returns 0 +sub is_g_reference { + my ($self, $entry, $browse_id) = @_; + + # Get the entries marc + my $record = $self->get_marc_by_id($entry); + if($record) { + # Look for the browse_id in the marc and check the w subfield value. + for my $field ($record->field('5..')) { + # Grab the id on the 0 subfield + my ($zero_subfield) = $field->subfield('0') =~ /(\d+)/; + if($zero_subfield == $browse_id) { + if($field->subfield('w') =~ /g/) { + return 1; + } + } + } + } + return 0; +} + +# Processes headings for unauthorized browse terms. Takes a browse term from results. +# and determine what reference heading it should display. 
+sub set_see_heading {
+    my ($self, $browse_term, $auth_id) = @_;
+    for my $sees (@{$browse_term->{sees}}) {
+        for my $heading (@{$sees->{headings}}) {
+            for my $value (values %$heading) {
+                for my $entry (@$value) {
+                    if($entry->{target} == $auth_id) {
+                        $browse_term->{ref_headings}->{$entry->{target}}->{display} = "See";
+                        $browse_term->{ref_headings}->{$entry->{target}}->{show} = 1;
+                        # Get the see reference's MARC.
+                        my $record = $self->get_marc_by_id($entry);
+                        if($record) {
+                            $self->extract_public_general_notes($record, $browse_term->{ref_headings}->{$entry->{target}});
+                        }
+                    } else {
+                        $browse_term->{ref_headings}->{$entry->{target}}->{show} = 0;
+                    }
+                }
+            }
+        }
+    }
+}
+
+# Takes an id from one of the browse entry tables and returns its MARC record
+# as a MARC::Record object.
+sub get_browse_entry_marc {
+    my ($self, $browse_id) = @_;
+    my @params;
+    push @params, $browse_id;
+    push @params, $self->cgi->param('qtype');
+
+    my $raw_marc = $self->editor->json_query({
+        from => [ "metabib.get_browse_entry_marc_record", @params ]
+    });
+
+    my $record;
+    # TODO: Change to check the actual MARC, not the hash.
+    if($raw_marc && @$raw_marc[0]->{"metabib.get_browse_entry_marc_record"}) {
+        eval {
+            $record = new_from_xml MARC::Record(@$raw_marc[0]->{"metabib.get_browse_entry_marc_record"});
+        };
+        if ($@) {
+            $logger->warn("Error reading marc record");
+            return undef;
+        }
+    }
+    return $record;
+}
+
+# This function takes an id into the metabib.browse_*****_entry table
+# and checks to see if that entry is a 400, 430 or 450 reference in another authority
+# record. This is useful to know so we can filter out See Also references
+# for non-authoritative entries.
+sub is_not_authoritative {
+    my $self = shift;
+    my $id = shift;
+
+    my $result;
+
+    if($self->cgi->param('qtype') eq 'author') {
+        $result = $self->editor->json_query({
+            select => {
+                acsaf => ["tag"],
+                ash => ["record"]
+            },
+            from => {
+                acsaf => {
+                    ash => {
+                        fkey => "id", field => "atag",
+                        join => {
+                            mbaeshm => {
+                                fkey => "id", field => "simple_heading",
+                                join => {
+                                    mbae => {
+                                        fkey => "entry", field => "id"
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            },
+            where => {"+mbae" => {id => $id}}
+        });
+    } elsif($self->cgi->param('qtype') eq 'subject') {
+        $result = $self->editor->json_query({
+            select => {
+                acsaf => ["tag"],
+                ash => ["record"]
+            },
+            from => {
+                acsaf => {
+                    ash => {
+                        fkey => "id", field => "atag",
+                        join => {
+                            mbseshm => {
+                                fkey => "id", field => "simple_heading",
+                                join => {
+                                    mbse => {
+                                        fkey => "entry", field => "id"
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            },
+            where => {"+mbse" => {id => $id}}
+        });
+    } elsif($self->cgi->param('qtype') eq 'series') {
+        $result = $self->editor->json_query({
+            select => {
+                acsaf => ["tag"],
+                ash => ["record"]
+            },
+            from => {
+                acsaf => {
+                    ash => {
+                        fkey => "id", field => "atag",
+                        join => {
+                            mbseeshm => {
+                                fkey => "id", field => "simple_heading",
+                                join => {
+                                    mbsee => {
+                                        fkey => "entry", field => "id"
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            },
+            where => {"+mbsee" => {id => $id}}
+        });
+    }
+
+    # If the result tag begins with a 4 we have an unauthorized heading, so return the traced record's id.
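+    # (In MARC 21 authority records, 4XX fields are "See From" tracings, i.e. unauthorized
+    # forms of a heading, which is why a tag starting with 4 marks this entry as unauthorized.)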
+ if($result->[0]{tag} =~ /^4/) { + return $result->[0]{record}; + } + return 0; +} + 1; diff --git a/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Search.pm b/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Search.pm index c6e8923440..fd6105f7e2 100644 --- a/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Search.pm +++ b/Open-ILS/src/perlmods/lib/OpenILS/WWW/EGCatLoader/Search.pm @@ -377,6 +377,10 @@ sub load_rresults { my @mods = $cgi->param('modifier'); my $is_meta = (@mods and grep {$_ eq 'metabib'} @mods and !$metarecord); my $id_key = $is_meta ? 'mmr_id' : 'bre_id'; + my $isBrowse = 0; + if($cgi->param('bterm')) { + $isBrowse = 1; + } # find the last record in the set, then redirect my $find_last = $cgi->param('find_last'); @@ -610,6 +614,7 @@ sub load_rresults { } $ctx->{search_facets} = $facets; + $ctx->{processed_search_query} = $user_query; return Apache2::Const::OK; } @@ -647,7 +652,7 @@ sub check_1hit_redirect { $self->ctx->{opac_root}, $$rec_ids[0], ); - + # If we get here from the same record detail page to which we # now wish to redirect, do not perform the redirect. This # approach seems to work well, with the rare exception of diff --git a/Open-ILS/src/templates/opac/css/style.css.tt2 b/Open-ILS/src/templates/opac/css/style.css.tt2 index 31bd8171fd..1d2a912f92 100644 --- a/Open-ILS/src/templates/opac/css/style.css.tt2 +++ b/Open-ILS/src/templates/opac/css/style.css.tt2 @@ -430,7 +430,7 @@ for now until a better color is picked - if needed. [%- END %] } -#adv_search.on, #num_search.on, #expert_search.on { +#adv_search.on, #num_search.on, #expert_search.on, #browse_search.on { color: [% css_colors.accent_darker %]; background-color: [% css_colors.background %]; text-decoration: none; @@ -2379,6 +2379,7 @@ a.preflib_change { line-height: normal; text-decoration: none; } + .rdetail-holding-group { [% IF rtl == 't' -%] margin-right: 1.5em; @@ -2400,6 +2401,66 @@ a.preflib_change { margin-left: 1.5em; [% END -%] } + +#adv_search_tabs { + height: 25px; +} +#adv_search_tabs a{ + font-size: 93%; + margin: 2px; + margin-right: 2px; + margin-bottom: 2px; + padding: 2px 2px 5px 2px; + -moz-border-radius: 10px 0px 0px 0px; + border-radius: 7px 7px 0px 0px; +} + +#search-box > span { + margin: 0 1em; +} +.browse-error { + font-weight: bold; + font-color: #c00; +} +.browse-result-sources, .browse-result-authority-bib-links { + margin-left: 1em; +} +.browse-result-best-match { + font-weight: bold; +} +.browse-pager { + margin: 2ex 0; +} +.browse-result-list { + padding-bottom: 0.5ex; +} +.browse-shortcuts { + font-size: 120%; +} +.browse-result-authority-field-name { + font-style: italic; + margin-right: 1em; +} +.browse-leading-article-warning { + font-style: italic; + font-size: 110%; +} +.browse-public-general-note { + font-size: 110%; +} +.browse-public-general-note-label { } +.browse-public-general-note-institution { + font-style: normal; + font-weight: bold; +} +.browse-public-general-note-body { + font-style: italic; +} + +.rdetail-holding-group { margin-left: 1.5em; } +.rdetail-holding-group span { margin-left: 1.5em; } +.rdetail-holding-group .paging { margin-left: 1.5em; } + #rdetail_deleted_exp { font-weight: bold; padding: 1em; diff --git a/Open-ILS/web/css/skin/default/opac/semiauto.css b/Open-ILS/web/css/skin/default/opac/semiauto.css index 96729ebdb0..3af8d00927 100644 --- a/Open-ILS/web/css/skin/default/opac/semiauto.css +++ b/Open-ILS/web/css/skin/default/opac/semiauto.css @@ -122,7 +122,6 @@ .opac-auto-135 { position: relative; top: -15px; left: -23px; } 
.opac-auto-136 { position: relative; top: 161px; left: 172px; } .opac-auto-137 { position: relative; top: 161px; left: -23px; } -#home_adv_search_link { position: relative; top: -1px; left: 10px; } #util_back_btn { position: relative; top: 1px; left: 10px; } .opac-auto-140 { position: relative; top: -2px; } #util_help_btn { position: relative; top: 2px; left: 40px; } diff --git a/Open-ILS/web/css/skin/default/opac/style.css b/Open-ILS/web/css/skin/default/opac/style.css index b12fac803e..76eefb690b 100644 --- a/Open-ILS/web/css/skin/default/opac/style.css +++ b/Open-ILS/web/css/skin/default/opac/style.css @@ -310,6 +310,14 @@ div.select-wrapper:hover { background: url('/images/expert_search_on.gif') no-repeat bottom !important; } +#browse_search { + width:156px; + background:url('/images/browse_search_off.gif') no-repeat bottom; +} +#browse_search.on { + background: url('/images/browse_search_on.gif') no-repeat bottom !important; +} + #acct_tabs, #acct_fines_tabs { height:33px; width:974px; @@ -863,6 +871,22 @@ div.select-wrapper:hover { font-size: 14px; } +.search_catalog_lbl a { + font-size: 10px; +} + +.adv_search_catalog_lbl a { + font-size: 10px; +} + +.browse_the_catalog_lbl { + font-size: 14px; +} + +.browse_the_catalog_lbl a { + font-size: 14px; +} + .lbl1 { font-size:14px; font-weight:bold; @@ -1223,3 +1247,101 @@ a.dash-link:hover { text-decoration: underline !important; } border-bottom-color: [% css_colors.border_dark %]; border-bottom-style: solid; } + +/*Browse Search Results*/ + +#browse-controls { + margin-bottom:10px; + padding-top:15px; +} + +.odd { + background-color: #DDDDD4; +} + +.even { + +} + +.browse-result-list { + /*list-style-type:none;*/ + margin-top:5px; +} + +.browse-result { + padding-bottom:4px; + padding-left:5px; + padding-right:5px; + padding-top:4px; +} + +.browse-result-sources { + float: right; +} + +.browse-result-best-match { + font-weight: bolder; +} + +.browse-pager { + padding-bottom:5px; + padding-left:15px; + padding-right:0px; + padding-top:5px; +} + +.browse-query-title { + float:left; +} + +.browse-pager:after { + clear: both; + content: ""; + display: block; +} + +.browse-pager-items { + float:right; +} + +.browse-pager-items:after { + clear: both; + content: ""; + display: block; +} + +.browse-pager-item { + float:left; + margin-right:7px; + margin-left:7px; + font-size:10px; +} + +.browse-leading-article-warning +{ + padding-bottom:0px; + padding-left:15px; + padding-right:0px; + padding-top:5px; +} + +.opac-button { + padding-bottom:3px; +} + +.contains-phrase-container { + margin-bottom:20px; + margin-left:30px; +} + +.browse-result-authority-headings li { + padding-bottom: 2px; + padding-top: 2px; +} + + +/*End Browse Search Results Area*/ + +.staff-client-link { + cursor:pointer; +} diff --git a/Open-ILS/web/images/browse_search_off.gif b/Open-ILS/web/images/browse_search_off.gif new file mode 100644 index 0000000000..373e27463d Binary files /dev/null and b/Open-ILS/web/images/browse_search_off.gif differ diff --git a/Open-ILS/web/images/browse_search_on.gif b/Open-ILS/web/images/browse_search_on.gif new file mode 100644 index 0000000000..e239b71bfc Binary files /dev/null and b/Open-ILS/web/images/browse_search_on.gif differ diff --git a/Open-ILS/web/js/ui/default/opac/browse_filtersort.js b/Open-ILS/web/js/ui/default/opac/browse_filtersort.js new file mode 100644 index 0000000000..a155bba3b2 --- /dev/null +++ b/Open-ILS/web/js/ui/default/opac/browse_filtersort.js @@ -0,0 +1,13 @@ +function sort_browse( element ) { + var sortValue = 
element.value;
+    var url = document.URL;
+    url = url.replace(/sort=[a-z]*(\.descending)?/, '');
+    url = url.replace(/;sort=[a-z]*(\.descending)?/, '');
+    url += ";sort=" + sortValue;
+
+    // Return to the first page when re-sorting.
+    url = url.replace(/;page=[\d]+/, '');
+
+    window.location.href = url;
+}
+
diff --git a/Open-ILS/web/js/ui/default/opac/browse_set_navigation.js b/Open-ILS/web/js/ui/default/opac/browse_set_navigation.js
new file mode 100644
index 0000000000..e83a0ab461
--- /dev/null
+++ b/Open-ILS/web/js/ui/default/opac/browse_set_navigation.js
@@ -0,0 +1,132 @@
+function set_navigator () {
+
+    dojo.require("fieldmapper.Fieldmapper");
+    dojo.require("fieldmapper.dojoData");
+    dojo.require("fieldmapper.OrgUtils");
+    dojo.require('openils.PermaCrud');
+
+    var matches;
+    var url = document.URL;
+    // Get the following values to run metabib.browse:
+    // - browse entry
+    var browseEntryRegex = /\%2C(\d+?)\%2C/;
+    matches = browseEntryRegex.exec(url);
+    var browseEntry = matches[1];
+    // - search class
+    var searchClassRegex = /qtype\=([a-zA-Z]+?)\;/;
+    matches = searchClassRegex.exec(url);
+
+    // The URL carries the URL-encoded pipe (%7C) rather than a literal '|', so the
+    // letters-only regex above cannot capture a qtype of id|bibcn. Default to that
+    // value and use the regex capture when a plain qtype matched.
+    var searchClass = 'id|bibcn';
+
+    if (matches != null && matches.length > 1){
+
+        searchClass = matches[1];
+    }
+
+    // - browse term
+    var searchTermRegex = /bterm\=(.+?)\;/;
+    matches = searchTermRegex.exec(url);
+    var searchTerm = matches[1];
+    // - context_org (locg)
+    var locgRegex = /locg\=(\d+?)\;/;
+    matches = locgRegex.exec(url);
+    var locg = matches[1];
+
+    var retrieve = ['open-ils.search', 'open-ils.search.metabib.browse.setnav'];
+    var params = [ browseEntry, searchClass, searchTerm, locg ];
+
+    fieldmapper.standardRequest(
+        retrieve,
+        { async:true,
+          params:params,
+          oncomplete: function(r) {
+              var results = openils.Util.readResponse(r);
+              var url = document.URL;
+              // Force the 1-hit browse redirect
+              url = url.replace(/(record)/, 'browse_items');
+              // Reset results paging
+              url = url.replace(/;page=\d+/, '');
+
+              // Uncomment if you want to reset these options on set navigation.
+ // Remove any sorting + //url = url.replace(/(;sort=[a-z])/, ''); + // Remove Limit to available items + //url = url.replace(/(;modifier=available)/, ''); + // Remove detail view + //url = url.replace(/(;detail_record_view=1)/, ''); + + //var previousUrl; + //var nextUrl; + + var previousUrl = url.replace(/(.*[a-z]+%2C)[0-9]+%2C[0-9]+(.*)/, + "$1" + results.previous_browse + '%2C' + + results.previous_field + "$2"); + + var nextUrl = url.replace(/(.*[a-z]+%2C)[0-9]+%2C[0-9]+(.*)/, + "$1" + results.next_browse + '%2C' + + results.next_field + "$2"); + + //if (/.*qtype=id%7Cbibcn;.*/.test(url)) { + + //previousUrl = url.replace( + ////(;fi%3Ahas_browse_entry=id%7Cbibcn%2C)(.*)/, + //"$1" + results.previous_browse + '%2C' + //+ results.previous_field); + + //nextUrl = url.replace( + ////(;fi%3Ahas_browse_entry=id%7Cbibcn%2C)(.*)/, + //"$1" + results.next_browse + '%2C' + //+ results.next_field); + //} + + //else{ + + //previousUrl = url.replace(/(;fi%3Ahas_browse_entry=[a-z]+%2C)([\d+,?]+)/, "$1" + results.previous_browse); + //previousUrl = previousUrl.replace(/(;fi%3Ahas_browse_entry=[a-z]+%2C\d+%2C)(\d+,?]+)/, "$1" + results.previous_field); + //nextUrl = url.replace(/(;fi%3Ahas_browse_entry=[a-z]+%2C)(\d+,?]+)/, "$1" + results.next_browse); + //nextUrl = nextUrl.replace(/(;fi%3Ahas_browse_entry=[a-z]+%2C\d+%2C)(\d+,?]+)/, "$1" + results.next_field); + //} + + handleNavButton(document.getElementById("previous_set"), + previousUrl, results.previous_browse); + + handleNavButton(document.getElementById("next_set"), + nextUrl, results.next_browse); + + document.getElementById("current_set").innerHTML = results.current_value; + } + } + ); +} + +function handleNavButton(element, targetUrl, isShown){ + + if (isShown){ + + element.href = targetUrl; + element.className = ""; + } + + else{ + + element.className = "hidden"; + } +} + +function addLoadEvent(func) { + var oldonload = window.onload; + if (typeof window.onload != 'function') { + window.onload = func; + } else { + window.onload = function() { + if (oldonload) { + oldonload(); + } + func(); + } + } +} + +addLoadEvent (set_navigator); \ No newline at end of file diff --git a/Open-ILS/web/js/ui/default/opac/limit_available.js b/Open-ILS/web/js/ui/default/opac/limit_available.js new file mode 100644 index 0000000000..88f3b8600a --- /dev/null +++ b/Open-ILS/web/js/ui/default/opac/limit_available.js @@ -0,0 +1,13 @@ +function limit_available( element ) { + var url = document.URL; + //always try to remove the available modifier + url = url.replace(/;modifier=available/, ''); + + //if the box is checked add in the availavle modifier + if(element.checked){ + url += ";modifier=available"; + } + //Return to first page when sorting + url = url.replace(/;page=[\d]+/, ''); + window.location.href = url; +} diff --git a/Open-ILS/web/js/ui/default/opac/staff.js b/Open-ILS/web/js/ui/default/opac/staff.js index 51d9eaa284..cfab8ac2d8 100644 --- a/Open-ILS/web/js/ui/default/opac/staff.js +++ b/Open-ILS/web/js/ui/default/opac/staff.js @@ -216,7 +216,9 @@ function rdetail_next_prev_actions(index, count, prev, next, start, end, results function fullurl(url) { if (url.match(/eg\/opac\/results/)) { return location.href.replace(/\/eg\/opac\/.+$/, url); - } else { + } else if (url.match(/eg\/opac\/browse_items/)) { + return location.href.replace(/\/eg\/opac\/.+$/, url); + }else { return location.href.replace(/\/\d+\??.*/, '/' + url); } } diff --git a/Open-ILS/web/js/ui/default/vandelay/vandelay.js b/Open-ILS/web/js/ui/default/vandelay/vandelay.js index 
ab03a20954..0901311cbb 100644
--- a/Open-ILS/web/js/ui/default/vandelay/vandelay.js
+++ b/Open-ILS/web/js/ui/default/vandelay/vandelay.js
@@ -753,6 +753,7 @@ function vlLoadMatchUI(recId) {
     var matchSeenMap = {};
+    // TODO: figure out what this is doing; without it, the checkboxes don't work.
     for(var i = 0; i < dataStore.items.length; i++) {
         var item = dataStore.items[i];
         item._id = i; // just need something unique
diff --git a/Open-ILS/web/js/ui/kcls/cat/authority/list_id.js b/Open-ILS/web/js/ui/kcls/cat/authority/list_id.js
index 7da461cada..a277edba9b 100644
--- a/Open-ILS/web/js/ui/kcls/cat/authority/list_id.js
+++ b/Open-ILS/web/js/ui/kcls/cat/authority/list_id.js
@@ -266,7 +266,7 @@ function getAssociatedBibs(authIds) {
             var count = 0;
             for (var bib in msg[authIds[i]]){
                 count ++;
-                dojo.place('
- ' + msg[authIds[i]][bib] + + dojo.place(' ', 'auth' + authIds[i] + 'BibResults', 'last'); diff --git a/Open-ILS/web/opac/locale/en-US/lang.dtd b/Open-ILS/web/opac/locale/en-US/lang.dtd index 106d38fe48..b24780efac 100644 --- a/Open-ILS/web/opac/locale/en-US/lang.dtd +++ b/Open-ILS/web/opac/locale/en-US/lang.dtd @@ -3908,8 +3908,17 @@ + + + + + + + + + diff --git a/Open-ILS/xul/staff_client/chrome/content/cat/opac.js b/Open-ILS/xul/staff_client/chrome/content/cat/opac.js index 6be3ba8937..304ef01237 100644 --- a/Open-ILS/xul/staff_client/chrome/content/cat/opac.js +++ b/Open-ILS/xul/staff_client/chrome/content/cat/opac.js @@ -669,7 +669,12 @@ function set_opac() { if (opac_url) { content_params.url = xulG.url_prefix( opac_url, true ); } else { + var sortMethod = g.data.browse_sort_default; + if(!g.data.browse_sort_default){ + sortMethod = 'pubdate.descending'; + } content_params.url = xulG.url_prefix( 'browser', true ); + content_params.url += '?sort='+ sortMethod; } if (g.data.adv_pane) { // For fun, we can have no extra params, extra params with &, or extra params with ;. diff --git a/Open-ILS/xul/staff_client/chrome/content/main/main.js b/Open-ILS/xul/staff_client/chrome/content/main/main.js index 9e3ad41c6c..7a0bfa218a 100644 --- a/Open-ILS/xul/staff_client/chrome/content/main/main.js +++ b/Open-ILS/xul/staff_client/chrome/content/main/main.js @@ -391,9 +391,15 @@ function main_init() { } catch(E) { G.data.adv_pane = null; } + try { + G.data.browse_sort_default = G.pref.getCharPref('open-ils.' + url + '.browse_sort_default'); + } catch(E) { + G.data.browse_sort_default = null; + } G.data.stash('search_lib'); G.data.stash('pref_lib'); G.data.stash('adv_pane'); + G.data.stash('browse_sort_default'); if (! url.match( '^(http|https)://' ) ) { url = 'http://' + url; } diff --git a/Open-ILS/xul/staff_client/chrome/content/main/search_prefs.xul b/Open-ILS/xul/staff_client/chrome/content/main/search_prefs.xul index e8f93ed681..374dd4f954 100644 --- a/Open-ILS/xul/staff_client/chrome/content/main/search_prefs.xul +++ b/Open-ILS/xul/staff_client/chrome/content/main/search_prefs.xul @@ -27,6 +27,7 @@ var search_lib_box; var pref_lib_box; var adv_pane_box; + var browse_sort_default_box; var prefSvc; var data; var saved_message; @@ -38,6 +39,7 @@ search_lib_box = document.getElementById('search_lib'); pref_lib_box = document.getElementById('pref_lib'); adv_pane_box = document.getElementById('adv_pane'); + browse_sort_default_box = document.getElementById('browse_sort_default'); // NOTE: If this ever deals with custom trees, this is where you likely want to mess with things. add_aou(data.tree.aou, ''); prefSvc = Components.classes["@mozilla.org/preferences-service;1"].getService(Components.interfaces.nsIPrefBranch).QueryInterface(Components.interfaces.nsIPrefBranch2); @@ -58,6 +60,11 @@ } else { adv_pane_box.selectedIndex = 0; } + if (data.browse_sort_default) { + browse_sort_default_box.value = data.browse_sort_default; + } else { + browse_sort_default_box.selectedIndex = 0; + } } function add_aou(aou, depth) { if (aou.children().length > 0) { @@ -76,24 +83,30 @@ prefSvc.setIntPref('open-ils.' + data.server_unadorned + '.search_lib', search_lib_box.value); prefSvc.setIntPref('open-ils.' + data.server_unadorned + '.pref_lib', pref_lib_box.value); prefSvc.setCharPref('open-ils.' + data.server_unadorned + '.adv_pane', adv_pane_box.value); + prefSvc.setCharPref('open-ils.' 
+ data.server_unadorned + '.browse_sort_default', browse_sort_default_box.value); data.search_lib = search_lib_box.value; data.pref_lib = pref_lib_box.value; data.adv_pane = adv_pane_box.value; + data.browse_sort_default = browse_sort_default_box.value; data.stash('search_lib'); data.stash('pref_lib'); data.stash('adv_pane'); + data.stash('browse_sort_default'); alert(saved_message); } function clear_prefs() { prefSvc.clearUserPref('open-ils.' + data.server_unadorned + '.search_lib'); prefSvc.clearUserPref('open-ils.' + data.server_unadorned + '.pref_lib'); prefSvc.clearUserPref('open-ils.' + data.server_unadorned + '.adv_pane'); + prefSvc.clearUserPref('open-ils.' + data.server_unadorned + '.browse_sort_default'); data.search_lib = null; data.pref_lib = null; data.adv_pane = null; + data.browse_sort_default = null; data.stash('search_lib'); data.stash('pref_lib'); data.stash('adv_pane'); + data.stash('browse_sort_default'); alert(cleared_message); } ]]> @@ -125,11 +138,26 @@ + + +
[% l('Search the Catalog') %] - [% l('Advanced Search') %] + [% IF took_care_of_form %] + [% l('Advanced Search') %] + [% ELSE %] + [% l('Advanced Search') %] + [% END %] + | + + [% l('Browse the Catalog') %] +
+ &staff.search_prefs.browse_sort_default.description; + + + + + + + + + + +
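For reference, the per-entry structure that gather_display_headings() in the Browse.pm changes above attaches to each browse result, and that the browse template reads when deciding what to show, looks roughly like the sketch below. The ref_headings, display and show names come from the patch itself; the note about 680 notes is an assumption, since extract_public_general_notes() chooses its own key.

    # Illustrative sketch only, not part of the patch.
    $browse_term->{ref_headings} = {
        $entry_target_id => {                 # keyed by the linked authority record id ($entry->{target} in the patch)
            display => 'Related heading...',  # 5xx heading text, or "See" for unauthorized entries
            show    => 1,                     # 0 suppresses the reference in the template
            # Any 680 public general notes are merged in here by
            # extract_public_general_notes() (key name assumed, not shown in the patch).
        },
    };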