From 2bf976ebd53c4712fdf1d3feb9cf14278cc04a84 Mon Sep 17 00:00:00 2001
From: Chris Sharp
Date: Sat, 16 Mar 2013 20:00:13 -0400
Subject: [PATCH] removing a reingest process from the 2.1.1-2.1.2 script

---
 .../pines-2.1.1-2.1.2-upgrade-db.sql | 208 +++++++++++++++++++++
 1 file changed, 208 insertions(+)
 create mode 100644 Open-ILS/src/sql/Pg/version-upgrade/pines-2.1.1-2.1.2-upgrade-db.sql

diff --git a/Open-ILS/src/sql/Pg/version-upgrade/pines-2.1.1-2.1.2-upgrade-db.sql b/Open-ILS/src/sql/Pg/version-upgrade/pines-2.1.1-2.1.2-upgrade-db.sql
new file mode 100644
index 0000000000..5054e81be0
--- /dev/null
+++ b/Open-ILS/src/sql/Pg/version-upgrade/pines-2.1.1-2.1.2-upgrade-db.sql
@@ -0,0 +1,208 @@
+--Upgrade Script for 2.1.1 to 2.1.2
+BEGIN;
+INSERT INTO config.upgrade_log (version) VALUES ('2.1.2');
+
+CREATE OR REPLACE FUNCTION biblio.extract_located_uris( bib_id BIGINT, marcxml TEXT, editor_id INT ) RETURNS VOID AS $func$
+DECLARE
+    uris            TEXT[];
+    uri_xml         TEXT;
+    uri_label       TEXT;
+    uri_href        TEXT;
+    uri_use         TEXT;
+    uri_owner_list  TEXT[];
+    uri_owner       TEXT;
+    uri_owner_id    INT;
+    uri_id          INT;
+    uri_cn_id       INT;
+    uri_map_id      INT;
+BEGIN
+
+    -- Clear any URI mappings and call numbers for this bib.
+    -- This leads to acn / auricnm inflation, but also enables
+    -- old acn/auricnm's to go away and for bibs to be deleted.
+    FOR uri_cn_id IN SELECT id FROM asset.call_number WHERE record = bib_id AND label = '##URI##' AND NOT deleted LOOP
+        DELETE FROM asset.uri_call_number_map WHERE call_number = uri_cn_id;
+        DELETE FROM asset.call_number WHERE id = uri_cn_id;
+    END LOOP;
+
+    uris := oils_xpath('//*[@tag="856" and (@ind1="4" or @ind1="1") and (@ind2="0" or @ind2="1")]',marcxml);
+    IF ARRAY_UPPER(uris,1) > 0 THEN
+        FOR i IN 1 .. ARRAY_UPPER(uris, 1) LOOP
+            -- First we pull info out of the 856
+            uri_xml     := uris[i];
+
+            uri_href    := (oils_xpath('//*[@code="u"]/text()',uri_xml))[1];
+            uri_label   := (oils_xpath('//*[@code="y"]/text()|//*[@code="3"]/text()',uri_xml))[1];
+            uri_use     := (oils_xpath('//*[@code="z"]/text()|//*[@code="2"]/text()|//*[@code="n"]/text()',uri_xml))[1];
+
+            IF uri_label IS NULL THEN
+                uri_label := uri_href;
+            END IF;
+            CONTINUE WHEN uri_href IS NULL;
+
+            -- Get the distinct list of libraries wanting to use
+            SELECT  ARRAY_ACCUM(
+                        DISTINCT REGEXP_REPLACE(
+                            x,
+                            $re$^.*?\((\w+)\).*$$re$,
+                            E'\\1'
+                        )
+                    ) INTO uri_owner_list
+              FROM  UNNEST(
+                        oils_xpath(
+                            '//*[@code="9"]/text()|//*[@code="w"]/text()|//*[@code="n"]/text()',
+                            uri_xml
+                        )
+                    )x;
+
+            IF ARRAY_UPPER(uri_owner_list,1) > 0 THEN
+
+                -- look for a matching uri
+                IF uri_use IS NULL THEN
+                    SELECT id INTO uri_id
+                        FROM asset.uri
+                        WHERE label = uri_label AND href = uri_href AND use_restriction IS NULL AND active
+                        ORDER BY id LIMIT 1;
+                    IF NOT FOUND THEN -- create one
+                        INSERT INTO asset.uri (label, href, use_restriction) VALUES (uri_label, uri_href, uri_use);
+                        SELECT id INTO uri_id
+                            FROM asset.uri
+                            WHERE label = uri_label AND href = uri_href AND use_restriction IS NULL AND active;
+                    END IF;
+                ELSE
+                    SELECT id INTO uri_id
+                        FROM asset.uri
+                        WHERE label = uri_label AND href = uri_href AND use_restriction = uri_use AND active
+                        ORDER BY id LIMIT 1;
+                    IF NOT FOUND THEN -- create one
+                        INSERT INTO asset.uri (label, href, use_restriction) VALUES (uri_label, uri_href, uri_use);
+                        SELECT id INTO uri_id
+                            FROM asset.uri
+                            WHERE label = uri_label AND href = uri_href AND use_restriction = uri_use AND active;
+                    END IF;
+                END IF;
+
+                FOR j IN 1 .. ARRAY_UPPER(uri_owner_list, 1) LOOP
+                    uri_owner := uri_owner_list[j];
+
+                    SELECT id INTO uri_owner_id FROM actor.org_unit WHERE shortname = uri_owner;
+                    CONTINUE WHEN NOT FOUND;
+
+                    -- we need a call number to link through
+                    SELECT id INTO uri_cn_id FROM asset.call_number WHERE owning_lib = uri_owner_id AND record = bib_id AND label = '##URI##' AND NOT deleted;
+                    IF NOT FOUND THEN
+                        INSERT INTO asset.call_number (owning_lib, record, create_date, edit_date, creator, editor, label)
+                            VALUES (uri_owner_id, bib_id, 'now', 'now', editor_id, editor_id, '##URI##');
+                        SELECT id INTO uri_cn_id FROM asset.call_number WHERE owning_lib = uri_owner_id AND record = bib_id AND label = '##URI##' AND NOT deleted;
+                    END IF;
+
+                    -- now, link them if they're not already
+                    SELECT id INTO uri_map_id FROM asset.uri_call_number_map WHERE call_number = uri_cn_id AND uri = uri_id;
+                    IF NOT FOUND THEN
+                        INSERT INTO asset.uri_call_number_map (call_number, uri) VALUES (uri_cn_id, uri_id);
+                    END IF;
+
+                END LOOP;
+
+            END IF;
+
+        END LOOP;
+    END IF;
+
+    RETURN;
+END;
+$func$ LANGUAGE PLPGSQL;
+
+
+INSERT INTO config.upgrade_log (version) VALUES ('0658');
+
+CREATE OR REPLACE FUNCTION asset.label_normalizer_dewey(TEXT) RETURNS TEXT AS $func$
+    # Derived from the Koha C4::ClassSortRoutine::Dewey module
+    # Copyright (C) 2007 LibLime
+    # Licensed under the GPL v2 or later
+
+    use strict;
+    use warnings;
+
+    my $init = uc(shift);
+    $init =~ s/^\s+//;
+    $init =~ s/\s+$//;
+    $init =~ s!/!!g;
+    $init =~ s/^([\p{IsAlpha}]+)/$1 /;
+    my @tokens = split /\.|\s+/, $init;
+    my $digit_group_count = 0;
+    for (my $i = 0; $i <= $#tokens; $i++) {
+        if ($tokens[$i] =~ /^\d+$/) {
+            $digit_group_count++;
+            if (2 == $digit_group_count) {
+                $tokens[$i] = sprintf("%-15.15s", $tokens[$i]);
+                $tokens[$i] =~ tr/ /0/;
+            }
+        }
+    }
+    # Pad the first digit_group if there was only one
+    if (1 == $digit_group_count) {
+        $tokens[0] .= '_000000000000000'
+    }
+    my $key = join("_", @tokens);
+    $key =~ s/[^\p{IsAlnum}_]//g;
+
+    return $key;
+
+$func$ LANGUAGE PLPERLU;
+
+INSERT INTO config.upgrade_log (version) VALUES ('0693');
+
+-- Delete the index normalizer that was meant to remove spaces from ISSNs
+-- but ended up breaking records with multiple ISSNs
+DELETE FROM config.metabib_field_index_norm_map WHERE id IN (
+    SELECT map.id FROM config.metabib_field_index_norm_map map
+        INNER JOIN config.metabib_field cmf ON cmf.id = map.field
+        INNER JOIN config.index_normalizer cin ON cin.id = map.norm
+    WHERE cin.func = 'replace'
+        AND cmf.field_class = 'identifier'
+        AND cmf.name = 'issn'
+        AND map.params = $$[" ",""]$$
+);
+
+COMMIT;
+
+\qecho We will attempt to create indexes that may have already been
+\qecho created if you upgraded to 2.0.11. You might see failures
+\qecho as a result.
+
+-- Placeholder for backported fix
+INSERT INTO config.upgrade_log (version) VALUES ('0691');
+
+CREATE INDEX poi_po_idx ON acq.po_item (purchase_order);
+
+CREATE INDEX ie_inv_idx on acq.invoice_entry (invoice);
+CREATE INDEX ie_po_idx on acq.invoice_entry (purchase_order);
+CREATE INDEX ie_li_idx on acq.invoice_entry (lineitem);
+
+CREATE INDEX ii_inv_idx on acq.invoice_item (invoice);
+CREATE INDEX ii_po_idx on acq.invoice_item (purchase_order);
+CREATE INDEX ii_poi_idx on acq.invoice_item (po_item);
+
+\qecho Finished schema updates; now updating the indexes for
+\qecho Dewey call numbers and ISSNs
+
+-- regenerate sort keys for any dewey call numbers
+UPDATE asset.call_number SET id = id WHERE label_class = 2;
+
+-- Reindex records that have more than just a single ISSN
+-- to ensure that spaces are maintained
+--SELECT metabib.reingest_metabib_field_entries(source)
+--  FROM metabib.identifier_field_entry mife
+--    INNER JOIN config.metabib_field cmf ON cmf.id = mife.field
+--  WHERE cmf.field_class = 'identifier'
+--    AND cmf.name = 'issn'
+--    AND char_length(value) > 9
+--;
+
+-- Fix sorting by pubdate by ensuring migrated records
+-- have a pubdate attribute in metabib.record_attr.attrs
+UPDATE metabib.record_attr
+  SET attrs = attrs || ('pubdate' => (attrs->'date1'))
+  WHERE defined(attrs, 'pubdate') IS FALSE
+    AND defined(attrs, 'date1') IS TRUE;
-- 
2.11.0
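A note for sites applying this, separate from the patch itself: the multi-ISSN
reingest is only commented out in the script, not replaced, so it can still be
run by hand once the upgrade has committed (ideally off-hours, since it touches
every record whose stored ISSN entry is longer than a single ISSN). The sketch
below is the same query from the commented-out block, preceded by a quick check
of the upgrade log; it assumes the stock config.upgrade_log columns (version,
install_date) and that you connect as your usual Evergreen database user.

    -- Confirm the upgrade stamps from this script are present.
    SELECT version, install_date
      FROM config.upgrade_log
     WHERE version IN ('2.1.2', '0658', '0691', '0693');

    -- The reingest this patch comments out, run manually instead.
    SELECT metabib.reingest_metabib_field_entries(source)
      FROM metabib.identifier_field_entry mife
        INNER JOIN config.metabib_field cmf ON cmf.id = mife.field
      WHERE cmf.field_class = 'identifier'
        AND cmf.name = 'issn'
        AND char_length(value) > 9;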