\qecho release upgrade scripts. Press <ENTER> to continue, or hit <CTRL-C> to
\qecho prevent the upgrade from happening.
\qecho **********************************************************************
-\prompt chance-to-quit
\qecho The following statement might fail, and that is okay; we are
\qecho ensuring that an upgrade that should have been applied during
-- check whether patch can be applied
SELECT evergreen.upgrade_deps_block_check('0722', :eg_version);
-ALTER TABLE acq.purchase_order ADD CONSTRAINT valid_po_state
- CHECK (state IN ('new','pending','on-order','received','cancelled'));
+-- ALTER TABLE acq.purchase_order ADD CONSTRAINT valid_po_state
+-- CHECK (state IN ('new','pending','on-order','received','cancelled'));
-- Evergreen DB patch 0723.schema.function.get_locale_name.sql
--
ON metabib.real_full_rec (tag, subfield, LOWER(substring(value, 1, 1024)))
WHERE tag IN ('020', '022', '024');
-
-
-SELECT evergreen.upgrade_deps_block_check('0740', :eg_version);
-
-CREATE OR REPLACE
- FUNCTION metabib.suggest_browse_entries(
- raw_query_text TEXT, -- actually typed by humans at the UI level
- search_class TEXT, -- 'alias' or 'class' or 'class|field..', etc
- headline_opts TEXT, -- markup options for ts_headline()
- visibility_org INTEGER,-- null if you don't want opac visibility test
-        query_limit     INTEGER,-- use in LIMIT clause of internal query
- normalization INTEGER -- argument to TS_RANK_CD()
- ) RETURNS TABLE (
- value TEXT, -- plain
- field INTEGER,
- buoyant_and_class_match BOOL,
- field_match BOOL,
- field_weight INTEGER,
- rank REAL,
- buoyant BOOL,
- match TEXT -- marked up
- ) AS $func$
-DECLARE
- prepared_query_texts TEXT[];
- query TSQUERY;
- plain_query TSQUERY;
- opac_visibility_join TEXT;
- search_class_join TEXT;
- r_fields RECORD;
-BEGIN
- prepared_query_texts := metabib.autosuggest_prepare_tsquery(raw_query_text);
-
- query := TO_TSQUERY('keyword', prepared_query_texts[1]);
- plain_query := TO_TSQUERY('keyword', prepared_query_texts[2]);
-
- visibility_org := NULLIF(visibility_org,-1);
- IF visibility_org IS NOT NULL THEN
- opac_visibility_join := '
- JOIN asset.opac_visible_copies aovc ON (
- aovc.record = x.source AND
- aovc.circ_lib IN (SELECT id FROM actor.org_unit_descendants($4))
- )';
- ELSE
- opac_visibility_join := '';
- END IF;
-
-    -- The following determines whether we only provide suggestions matching
- -- the user's selected search_class, or whether we show other suggestions
- -- too. The reason for MIN() is that for search_classes like
- -- 'title|proper|uniform' you would otherwise get multiple rows. The
- -- implication is that if title as a class doesn't have restrict,
- -- nor does the proper field, but the uniform field does, you're going
- -- to get 'false' for your overall evaluation of 'should we restrict?'
- -- To invert that, change from MIN() to MAX().
-
- SELECT
- INTO r_fields
- MIN(cmc.restrict::INT) AS restrict_class,
- MIN(cmf.restrict::INT) AS restrict_field
- FROM metabib.search_class_to_registered_components(search_class)
- AS _registered (field_class TEXT, field INT)
- JOIN
- config.metabib_class cmc ON (cmc.name = _registered.field_class)
- LEFT JOIN
- config.metabib_field cmf ON (cmf.id = _registered.field);
-
- -- evaluate 'should we restrict?'
- IF r_fields.restrict_field::BOOL OR r_fields.restrict_class::BOOL THEN
- search_class_join := '
- JOIN
- metabib.search_class_to_registered_components($2)
- AS _registered (field_class TEXT, field INT) ON (
- (_registered.field IS NULL AND
- _registered.field_class = cmf.field_class) OR
- (_registered.field = cmf.id)
- )
- ';
- ELSE
- search_class_join := '
- LEFT JOIN
- metabib.search_class_to_registered_components($2)
- AS _registered (field_class TEXT, field INT) ON (
- _registered.field_class = cmc.name
- )
- ';
- END IF;
-
- RETURN QUERY EXECUTE '
-SELECT DISTINCT
- x.value,
- x.id,
- x.push,
- x.restrict,
- x.weight,
- x.ts_rank_cd,
- x.buoyant,
- TS_HEADLINE(value, $7, $3)
- FROM (SELECT DISTINCT
- mbe.value,
- cmf.id,
- cmc.buoyant AND _registered.field_class IS NOT NULL AS push,
- _registered.field = cmf.id AS restrict,
- cmf.weight,
- TS_RANK_CD(mbe.index_vector, $1, $6),
- cmc.buoyant,
- mbedm.source
- FROM metabib.browse_entry_def_map mbedm
- JOIN (SELECT * FROM metabib.browse_entry WHERE index_vector @@ $1 LIMIT 10000) mbe ON (mbe.id = mbedm.entry)
- JOIN config.metabib_field cmf ON (cmf.id = mbedm.def)
- JOIN config.metabib_class cmc ON (cmf.field_class = cmc.name)
- ' || search_class_join || '
- ORDER BY 3 DESC, 4 DESC NULLS LAST, 5 DESC, 6 DESC, 7 DESC, 1 ASC
- LIMIT 1000) AS x
- ' || opac_visibility_join || '
- ORDER BY 3 DESC, 4 DESC NULLS LAST, 5 DESC, 6 DESC, 7 DESC, 1 ASC
- LIMIT $5
-' -- sic, repeat the order by clause in the outer select too
- USING
- query, search_class, headline_opts,
- visibility_org, query_limit, normalization, plain_query
- ;
-
- -- sort order:
- -- buoyant AND chosen class = match class
- -- chosen field = match field
- -- field weight
- -- rank
- -- buoyancy
- -- value itself
-
-END;
-$func$ LANGUAGE PLPGSQL;
-
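-- Illustrative, read-only check of the autosuggest function above. The search
-- term, org unit, limit, and normalization value are example inputs only.
SELECT value, match, rank
  FROM metabib.suggest_browse_entries(
           'harry pot',                  -- raw user input
           'title',                      -- restrict to the title search class
           'StartSel=<b>, StopSel=</b>', -- ts_headline() markup options
           1,                            -- OPAC visibility test scoped to org unit 1
           10,                           -- LIMIT for the suggestion list
           14                            -- ts_rank_cd() normalization flags
       );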
-
-SELECT evergreen.upgrade_deps_block_check('0742', :eg_version);
-
--- Prepare for the July 2013 introduction of OCLC's "on" prefix
--- Per LP# 1049171
-
-CREATE OR REPLACE FUNCTION maintain_control_numbers() RETURNS TRIGGER AS $func$
-use strict;
-use MARC::Record;
-use MARC::File::XML (BinaryEncoding => 'UTF-8');
-use MARC::Charset;
-use Encode;
-use Unicode::Normalize;
-
-MARC::Charset->assume_unicode(1);
-
-my $record = MARC::Record->new_from_xml($_TD->{new}{marc});
-my $schema = $_TD->{table_schema};
-my $rec_id = $_TD->{new}{id};
-
-# Short-circuit if maintaining control numbers per MARC21 spec is not enabled
-my $enable = spi_exec_query("SELECT enabled FROM config.global_flag WHERE name = 'cat.maintain_control_numbers'");
-if (!($enable->{processed}) or $enable->{rows}[0]->{enabled} eq 'f') {
- return;
-}
-
-# Get the control number identifier from an OU setting based on $_TD->{new}{owner}
-my $ou_cni = 'EVRGRN';
-
-my $owner;
-if ($schema eq 'serial') {
- $owner = $_TD->{new}{owning_lib};
-} else {
- # are.owner and bre.owner can be null, so fall back to the consortial setting
- $owner = $_TD->{new}{owner} || 1;
-}
-
-my $ous_rv = spi_exec_query("SELECT value FROM actor.org_unit_ancestor_setting('cat.marc_control_number_identifier', $owner)");
-if ($ous_rv->{processed}) {
- $ou_cni = $ous_rv->{rows}[0]->{value};
- $ou_cni =~ s/"//g; # Stupid VIM syntax highlighting"
-} else {
- # Fall back to the shortname of the OU if there was no OU setting
- $ous_rv = spi_exec_query("SELECT shortname FROM actor.org_unit WHERE id = $owner");
- if ($ous_rv->{processed}) {
- $ou_cni = $ous_rv->{rows}[0]->{shortname};
- }
-}
-
-my ($create, $munge) = (0, 0);
-
-my @scns = $record->field('035');
-
-foreach my $id_field ('001', '003') {
- my $spec_value;
- my @controls = $record->field($id_field);
-
- if ($id_field eq '001') {
- $spec_value = $rec_id;
- } else {
- $spec_value = $ou_cni;
- }
-
- # Create the 001/003 if none exist
- if (scalar(@controls) == 1) {
- # Only one field; check to see if we need to munge it
- unless (grep $_->data() eq $spec_value, @controls) {
- $munge = 1;
- }
- } else {
- # Delete the other fields, as with more than 1 001/003 we do not know which 003/001 to match
- foreach my $control (@controls) {
- $record->delete_field($control);
- }
- $record->insert_fields_ordered(MARC::Field->new($id_field, $spec_value));
- $create = 1;
- }
-}
-
-my $cn = $record->field('001')->data();
-# Special handling of OCLC numbers, often found in records that lack 003
-if ($cn =~ /^o(c[nm]|n)\d/) {
- $cn =~ s/^o(c[nm]|n)0*(\d+)/$2/;
- $record->field('003')->data('OCoLC');
- $create = 0;
-}
-
-# Now, if we need to munge the 001, we will first push the existing 001/003
-# into the 035; but if the record did not have one (and one only) 001 and 003
-# to begin with, skip this process
-if ($munge and not $create) {
-
- my $scn = "(" . $record->field('003')->data() . ")" . $cn;
-
- # Do not create duplicate 035 fields
- unless (grep $_->subfield('a') eq $scn, @scns) {
- $record->insert_fields_ordered(MARC::Field->new('035', '', '', 'a' => $scn));
- }
-}
-
-# Set the 001/003 and update the MARC
-if ($create or $munge) {
- $record->field('001')->data($rec_id);
- $record->field('003')->data($ou_cni);
-
- my $xml = $record->as_xml_record();
- $xml =~ s/\n//sgo;
- $xml =~ s/^<\?xml.+\?\s*>//go;
- $xml =~ s/>\s+</></go;
- $xml =~ s/\p{Cc}//go;
-
- # Embed a version of OpenILS::Application::AppUtils->entityize()
- # to avoid having to set PERL5LIB for PostgreSQL as well
-
- # If we are going to convert non-ASCII characters to XML entities,
- # we had better be dealing with a UTF8 string to begin with
- $xml = decode_utf8($xml);
-
- $xml = NFC($xml);
-
- # Convert raw ampersands to entities
-    $xml =~ s/&(?!\S+;)/&amp;/gso;
-
- # Convert Unicode characters to entities
- $xml =~ s/([\x{0080}-\x{fffd}])/sprintf('&#x%X;',ord($1))/sgoe;
-
- $xml =~ s/[\x00-\x1f]//go;
- $_TD->{new}{marc} = $xml;
-
- return "MODIFY";
-}
-
-return;
-$func$ LANGUAGE PLPERLU;
-
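-- The function above only runs as a row-level trigger on the MARC record
-- tables; since this is a CREATE OR REPLACE, existing triggers keep pointing
-- at the new body. For reference, the wiring looks roughly like the sketch
-- below (the trigger name is illustrative; left commented out on purpose):
-- CREATE TRIGGER b_maintain_control_numbers
--     BEFORE INSERT OR UPDATE ON biblio.record_entry
--     FOR EACH ROW EXECUTE PROCEDURE maintain_control_numbers();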
COMMIT;
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.2.4', :eg_version);
-SELECT evergreen.upgrade_deps_block_check('0744', :eg_version);
-
-INSERT INTO config.org_unit_setting_type
- (name, grp, label, description, datatype)
- VALUES (
- 'circ.lost.xact_open_on_zero',
- 'finance',
- oils_i18n_gettext(
- 'circ.lost.xact_open_on_zero',
- 'Leave transaction open when lost balance equals zero',
- 'coust',
- 'label'
- ),
- oils_i18n_gettext(
- 'circ.lost.xact_open_on_zero',
- 'Leave transaction open when lost balance equals zero. This leaves the lost copy on the patron record when it is paid',
- 'coust',
- 'description'
- ),
- 'bool'
- );
-
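-- The rows above only register the setting type. Turning it on for a given
-- org unit is a separate, local decision; an illustrative (commented-out)
-- example for org unit 1:
-- INSERT INTO actor.org_unit_setting (org_unit, name, value)
--     VALUES (1, 'circ.lost.xact_open_on_zero', 'true');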
-
-SELECT evergreen.upgrade_deps_block_check('0746', :eg_version);
-
-ALTER TABLE action.hold_request ALTER COLUMN email_notify SET DEFAULT 'false';
-
COMMIT;
\set eg_version '''2.2.5'''
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.2.5', :eg_version);
--- Only consider main entry headings for bib overlay
-
-
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0750', :eg_version);
-
-
--- Function to generate an ephemeral overlay template from an authority record
-CREATE OR REPLACE FUNCTION authority.generate_overlay_template (source_xml TEXT) RETURNS TEXT AS $f$
-DECLARE
- cset INT;
- main_entry authority.control_set_authority_field%ROWTYPE;
- bib_field authority.control_set_bib_field%ROWTYPE;
- auth_id INT DEFAULT oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', source_xml)::INT;
- replace_data XML[] DEFAULT '{}'::XML[];
- replace_rules TEXT[] DEFAULT '{}'::TEXT[];
- auth_field XML[];
-BEGIN
- IF auth_id IS NULL THEN
- RETURN NULL;
- END IF;
-
-    -- Default to the LoC control set
- SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id;
-
- -- if none, make a best guess
- IF cset IS NULL THEN
- SELECT control_set INTO cset
- FROM authority.control_set_authority_field
- WHERE tag IN (
- SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marc::XML)::TEXT[])
- FROM authority.record_entry
- WHERE id = auth_id
- )
- LIMIT 1;
- END IF;
-
- -- if STILL none, no-op change
- IF cset IS NULL THEN
- RETURN XMLELEMENT(
- name record,
- XMLATTRIBUTES('http://www.loc.gov/MARC21/slim' AS xmlns),
- XMLELEMENT( name leader, '00881nam a2200193 4500'),
- XMLELEMENT(
- name datafield,
- XMLATTRIBUTES( '905' AS tag, ' ' AS ind1, ' ' AS ind2),
- XMLELEMENT(
- name subfield,
- XMLATTRIBUTES('d' AS code),
- '901c'
- )
- )
- )::TEXT;
- END IF;
-
- FOR main_entry IN SELECT * FROM authority.control_set_authority_field acsaf WHERE acsaf.control_set = cset AND acsaf.main_entry IS NULL LOOP
- auth_field := XPATH('//*[@tag="'||main_entry.tag||'"][1]',source_xml::XML);
- IF ARRAY_LENGTH(auth_field,1) > 0 THEN
- FOR bib_field IN SELECT * FROM authority.control_set_bib_field WHERE authority_field = main_entry.id LOOP
- replace_data := replace_data || XMLELEMENT( name datafield, XMLATTRIBUTES(bib_field.tag AS tag), XPATH('//*[local-name()="subfield"]',auth_field[1])::XML[]);
- replace_rules := replace_rules || ( bib_field.tag || main_entry.sf_list || E'[0~\\)' || auth_id || '$]' );
- END LOOP;
- EXIT;
- END IF;
- END LOOP;
-
- RETURN XMLELEMENT(
- name record,
- XMLATTRIBUTES('http://www.loc.gov/MARC21/slim' AS xmlns),
- XMLELEMENT( name leader, '00881nam a2200193 4500'),
- replace_data,
- XMLELEMENT(
- name datafield,
- XMLATTRIBUTES( '905' AS tag, ' ' AS ind1, ' ' AS ind2),
- XMLELEMENT(
- name subfield,
- XMLATTRIBUTES('r' AS code),
- ARRAY_TO_STRING(replace_rules,',')
- )
- )
- )::TEXT;
-END;
-$f$ STABLE LANGUAGE PLPGSQL;
-
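-- Illustrative, read-only call of the overlay-template builder above; the
-- authority record id is an example:
SELECT authority.generate_overlay_template(are.marc)
  FROM authority.record_entry are
 WHERE are.id = 1;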
--- Change the two argument form of vandelay.merge_record_xml to
--- prevent bibliographic record destruction when there is nothing to
--- do.
-
-
-CREATE OR REPLACE FUNCTION vandelay.merge_record_xml ( target_marc TEXT, template_marc TEXT ) RETURNS TEXT AS $$
-DECLARE
- dyn_profile vandelay.compile_profile%ROWTYPE;
- replace_rule TEXT;
- tmp_marc TEXT;
- trgt_marc TEXT;
- tmpl_marc TEXT;
- match_count INT;
-BEGIN
-
- IF target_marc IS NULL OR template_marc IS NULL THEN
- -- RAISE NOTICE 'no marc for target or template record';
- RETURN NULL;
- END IF;
-
- dyn_profile := vandelay.compile_profile( template_marc );
-
- IF dyn_profile.replace_rule <> '' AND dyn_profile.preserve_rule <> '' THEN
- -- RAISE NOTICE 'both replace [%] and preserve [%] specified', dyn_profile.replace_rule, dyn_profile.preserve_rule;
- RETURN NULL;
- END IF;
-
- IF dyn_profile.replace_rule = '' AND dyn_profile.preserve_rule = '' AND dyn_profile.add_rule = '' THEN
- --Since we have nothing to do, just return what we were given.
- RETURN target_marc;
- ELSIF dyn_profile.replace_rule <> '' THEN
- trgt_marc = target_marc;
- tmpl_marc = template_marc;
- replace_rule = dyn_profile.replace_rule;
- ELSE
- tmp_marc = target_marc;
- trgt_marc = template_marc;
- tmpl_marc = tmp_marc;
- replace_rule = dyn_profile.preserve_rule;
- END IF;
-
- RETURN vandelay.merge_record_xml( trgt_marc, tmpl_marc, dyn_profile.add_rule, replace_rule, dyn_profile.strip_rule );
-
-END;
-$$ LANGUAGE PLPGSQL;
-
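-- How the pieces fit together (illustrative, read-only; both ids are
-- examples): the ephemeral template built from an authority record is merged
-- over a bib record's MARC without touching the database.
SELECT vandelay.merge_record_xml(
           bre.marc,
           authority.generate_overlay_template(are.marc)
       )
  FROM biblio.record_entry bre,
       authority.record_entry are
 WHERE bre.id = 1
   AND are.id = 1;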
-CREATE OR REPLACE FUNCTION vandelay.template_overlay_bib_record ( v_marc TEXT, eg_id BIGINT, merge_profile_id INT ) RETURNS BOOL AS $$
-DECLARE
- merge_profile vandelay.merge_profile%ROWTYPE;
- dyn_profile vandelay.compile_profile%ROWTYPE;
- editor_string TEXT;
- editor_id INT;
- source_marc TEXT;
- target_marc TEXT;
- eg_marc TEXT;
- replace_rule TEXT;
- match_count INT;
-BEGIN
-
- SELECT b.marc INTO eg_marc
- FROM biblio.record_entry b
- WHERE b.id = eg_id
- LIMIT 1;
-
- IF eg_marc IS NULL OR v_marc IS NULL THEN
- -- RAISE NOTICE 'no marc for template or bib record';
- RETURN FALSE;
- END IF;
-
- dyn_profile := vandelay.compile_profile( v_marc );
-
- IF merge_profile_id IS NOT NULL THEN
- SELECT * INTO merge_profile FROM vandelay.merge_profile WHERE id = merge_profile_id;
- IF FOUND THEN
- dyn_profile.add_rule := BTRIM( dyn_profile.add_rule || ',' || COALESCE(merge_profile.add_spec,''), ',');
- dyn_profile.strip_rule := BTRIM( dyn_profile.strip_rule || ',' || COALESCE(merge_profile.strip_spec,''), ',');
- dyn_profile.replace_rule := BTRIM( dyn_profile.replace_rule || ',' || COALESCE(merge_profile.replace_spec,''), ',');
- dyn_profile.preserve_rule := BTRIM( dyn_profile.preserve_rule || ',' || COALESCE(merge_profile.preserve_spec,''), ',');
- END IF;
- END IF;
-
- IF dyn_profile.replace_rule <> '' AND dyn_profile.preserve_rule <> '' THEN
- -- RAISE NOTICE 'both replace [%] and preserve [%] specified', dyn_profile.replace_rule, dyn_profile.preserve_rule;
- RETURN FALSE;
- END IF;
-
- IF dyn_profile.replace_rule = '' AND dyn_profile.preserve_rule = '' AND dyn_profile.add_rule = '' AND dyn_profile.strip_rule = '' THEN
- --Since we have nothing to do, just return a NOOP "we did it"
- RETURN TRUE;
- ELSIF dyn_profile.replace_rule <> '' THEN
- source_marc = v_marc;
- target_marc = eg_marc;
- replace_rule = dyn_profile.replace_rule;
- ELSE
- source_marc = eg_marc;
- target_marc = v_marc;
- replace_rule = dyn_profile.preserve_rule;
- END IF;
-
- UPDATE biblio.record_entry
- SET marc = vandelay.merge_record_xml( target_marc, source_marc, dyn_profile.add_rule, replace_rule, dyn_profile.strip_rule )
- WHERE id = eg_id;
-
- IF NOT FOUND THEN
- -- RAISE NOTICE 'update of biblio.record_entry failed';
- RETURN FALSE;
- END IF;
-
- RETURN TRUE;
-
-END;
-$$ LANGUAGE PLPGSQL;
-
-CREATE OR REPLACE FUNCTION vandelay.overlay_authority_record ( import_id BIGINT, eg_id BIGINT, merge_profile_id INT ) RETURNS BOOL AS $$
-DECLARE
- merge_profile vandelay.merge_profile%ROWTYPE;
- dyn_profile vandelay.compile_profile%ROWTYPE;
- source_marc TEXT;
- target_marc TEXT;
- eg_marc TEXT;
- v_marc TEXT;
- replace_rule TEXT;
- match_count INT;
-BEGIN
-
- SELECT b.marc INTO eg_marc
- FROM authority.record_entry b
- JOIN vandelay.authority_match m ON (m.eg_record = b.id AND m.queued_record = import_id)
- LIMIT 1;
-
- SELECT q.marc INTO v_marc
- FROM vandelay.queued_record q
- JOIN vandelay.authority_match m ON (m.queued_record = q.id AND q.id = import_id)
- LIMIT 1;
-
- IF eg_marc IS NULL OR v_marc IS NULL THEN
- -- RAISE NOTICE 'no marc for vandelay or authority record';
- RETURN FALSE;
- END IF;
-
- dyn_profile := vandelay.compile_profile( v_marc );
-
- IF merge_profile_id IS NOT NULL THEN
- SELECT * INTO merge_profile FROM vandelay.merge_profile WHERE id = merge_profile_id;
- IF FOUND THEN
- dyn_profile.add_rule := BTRIM( dyn_profile.add_rule || ',' || COALESCE(merge_profile.add_spec,''), ',');
- dyn_profile.strip_rule := BTRIM( dyn_profile.strip_rule || ',' || COALESCE(merge_profile.strip_spec,''), ',');
- dyn_profile.replace_rule := BTRIM( dyn_profile.replace_rule || ',' || COALESCE(merge_profile.replace_spec,''), ',');
- dyn_profile.preserve_rule := BTRIM( dyn_profile.preserve_rule || ',' || COALESCE(merge_profile.preserve_spec,''), ',');
- END IF;
- END IF;
-
- IF dyn_profile.replace_rule <> '' AND dyn_profile.preserve_rule <> '' THEN
- -- RAISE NOTICE 'both replace [%] and preserve [%] specified', dyn_profile.replace_rule, dyn_profile.preserve_rule;
- RETURN FALSE;
- END IF;
-
- IF dyn_profile.replace_rule = '' AND dyn_profile.preserve_rule = '' AND dyn_profile.add_rule = '' AND dyn_profile.strip_rule = '' THEN
- --Since we have nothing to do, just return a NOOP "we did it"
- RETURN TRUE;
- ELSIF dyn_profile.replace_rule <> '' THEN
- source_marc = v_marc;
- target_marc = eg_marc;
- replace_rule = dyn_profile.replace_rule;
- ELSE
- source_marc = eg_marc;
- target_marc = v_marc;
- replace_rule = dyn_profile.preserve_rule;
- END IF;
-
- UPDATE authority.record_entry
- SET marc = vandelay.merge_record_xml( target_marc, source_marc, dyn_profile.add_rule, replace_rule, dyn_profile.strip_rule )
- WHERE id = eg_id;
-
- IF FOUND THEN
- UPDATE vandelay.queued_authority_record
- SET imported_as = eg_id,
- import_time = NOW()
- WHERE id = import_id;
- RETURN TRUE;
- END IF;
-
- -- RAISE NOTICE 'update of authority.record_entry failed';
-
- RETURN FALSE;
-
-END;
-$$ LANGUAGE PLPGSQL;
COMMIT;
-- Evergreen DB patch XXXX.function.merge_record_assets_deleted_call_numbers.sql
--
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0761', :eg_version);
-
-CREATE OR REPLACE FUNCTION asset.merge_record_assets( target_record BIGINT, source_record BIGINT ) RETURNS INT AS $func$
-DECLARE
- moved_objects INT := 0;
- source_cn asset.call_number%ROWTYPE;
- target_cn asset.call_number%ROWTYPE;
- metarec metabib.metarecord%ROWTYPE;
- hold action.hold_request%ROWTYPE;
- ser_rec serial.record_entry%ROWTYPE;
- ser_sub serial.subscription%ROWTYPE;
- acq_lineitem acq.lineitem%ROWTYPE;
- acq_request acq.user_request%ROWTYPE;
- booking booking.resource_type%ROWTYPE;
- source_part biblio.monograph_part%ROWTYPE;
- target_part biblio.monograph_part%ROWTYPE;
- multi_home biblio.peer_bib_copy_map%ROWTYPE;
- uri_count INT := 0;
- counter INT := 0;
- uri_datafield TEXT;
- uri_text TEXT := '';
-BEGIN
-
- -- move any 856 entries on records that have at least one MARC-mapped URI entry
- SELECT INTO uri_count COUNT(*)
- FROM asset.uri_call_number_map m
- JOIN asset.call_number cn ON (m.call_number = cn.id)
- WHERE cn.record = source_record;
-
- IF uri_count > 0 THEN
-
- -- This returns more nodes than you might expect:
- -- 7 instead of 1 for an 856 with $u $y $9
- SELECT COUNT(*) INTO counter
- FROM oils_xpath_table(
- 'id',
- 'marc',
- 'biblio.record_entry',
- '//*[@tag="856"]',
- 'id=' || source_record
- ) as t(i int,c text);
-
- FOR i IN 1 .. counter LOOP
- SELECT '<datafield xmlns="http://www.loc.gov/MARC21/slim"' ||
- ' tag="856"' ||
- ' ind1="' || FIRST(ind1) || '"' ||
- ' ind2="' || FIRST(ind2) || '">' ||
- array_to_string(
- array_accum(
- '<subfield code="' || subfield || '">' ||
- regexp_replace(
- regexp_replace(
-                                        regexp_replace(data,'&','&amp;','g'),
-                                        '>', '&gt;', 'g'
- ),
-                                    '<', '&lt;', 'g'
- ) || '</subfield>'
- ), ''
- ) || '</datafield>' INTO uri_datafield
- FROM oils_xpath_table(
- 'id',
- 'marc',
- 'biblio.record_entry',
- '//*[@tag="856"][position()=' || i || ']/@ind1|' ||
- '//*[@tag="856"][position()=' || i || ']/@ind2|' ||
- '//*[@tag="856"][position()=' || i || ']/*/@code|' ||
- '//*[@tag="856"][position()=' || i || ']/*[@code]',
- 'id=' || source_record
- ) as t(id int,ind1 text, ind2 text,subfield text,data text);
-
- -- As most of the results will be NULL, protect against NULLifying
- -- the valid content that we do generate
- uri_text := uri_text || COALESCE(uri_datafield, '');
- END LOOP;
-
- IF uri_text <> '' THEN
- UPDATE biblio.record_entry
- SET marc = regexp_replace(marc,'(</[^>]*record>)', uri_text || E'\\1')
- WHERE id = target_record;
- END IF;
-
- END IF;
-
- -- Find and move metarecords to the target record
- SELECT INTO metarec *
- FROM metabib.metarecord
- WHERE master_record = source_record;
-
- IF FOUND THEN
- UPDATE metabib.metarecord
- SET master_record = target_record,
- mods = NULL
- WHERE id = metarec.id;
-
- moved_objects := moved_objects + 1;
- END IF;
-
- -- Find call numbers attached to the source ...
- FOR source_cn IN SELECT * FROM asset.call_number WHERE record = source_record LOOP
-
- SELECT INTO target_cn *
- FROM asset.call_number
- WHERE label = source_cn.label
- AND owning_lib = source_cn.owning_lib
- AND record = target_record
- AND NOT deleted;
-
- -- ... and if there's a conflicting one on the target ...
- IF FOUND THEN
-
- -- ... move the copies to that, and ...
- UPDATE asset.copy
- SET call_number = target_cn.id
- WHERE call_number = source_cn.id;
-
- -- ... move V holds to the move-target call number
- FOR hold IN SELECT * FROM action.hold_request WHERE target = source_cn.id AND hold_type = 'V' LOOP
-
- UPDATE action.hold_request
- SET target = target_cn.id
- WHERE id = hold.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- ... if not ...
- ELSE
- -- ... just move the call number to the target record
- UPDATE asset.call_number
- SET record = target_record
- WHERE id = source_cn.id;
- END IF;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find T holds targeting the source record ...
- FOR hold IN SELECT * FROM action.hold_request WHERE target = source_record AND hold_type = 'T' LOOP
-
- -- ... and move them to the target record
- UPDATE action.hold_request
- SET target = target_record
- WHERE id = hold.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find serial records targeting the source record ...
- FOR ser_rec IN SELECT * FROM serial.record_entry WHERE record = source_record LOOP
- -- ... and move them to the target record
- UPDATE serial.record_entry
- SET record = target_record
- WHERE id = ser_rec.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find serial subscriptions targeting the source record ...
- FOR ser_sub IN SELECT * FROM serial.subscription WHERE record_entry = source_record LOOP
- -- ... and move them to the target record
- UPDATE serial.subscription
- SET record_entry = target_record
- WHERE id = ser_sub.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find booking resource types targeting the source record ...
- FOR booking IN SELECT * FROM booking.resource_type WHERE record = source_record LOOP
- -- ... and move them to the target record
- UPDATE booking.resource_type
- SET record = target_record
- WHERE id = booking.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find acq lineitems targeting the source record ...
- FOR acq_lineitem IN SELECT * FROM acq.lineitem WHERE eg_bib_id = source_record LOOP
- -- ... and move them to the target record
- UPDATE acq.lineitem
- SET eg_bib_id = target_record
- WHERE id = acq_lineitem.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find acq user purchase requests targeting the source record ...
- FOR acq_request IN SELECT * FROM acq.user_request WHERE eg_bib = source_record LOOP
- -- ... and move them to the target record
- UPDATE acq.user_request
- SET eg_bib = target_record
- WHERE id = acq_request.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find parts attached to the source ...
- FOR source_part IN SELECT * FROM biblio.monograph_part WHERE record = source_record LOOP
-
- SELECT INTO target_part *
- FROM biblio.monograph_part
- WHERE label = source_part.label
- AND record = target_record;
-
- -- ... and if there's a conflicting one on the target ...
- IF FOUND THEN
-
- -- ... move the copy-part maps to that, and ...
- UPDATE asset.copy_part_map
- SET part = target_part.id
- WHERE part = source_part.id;
-
- -- ... move P holds to the move-target part
- FOR hold IN SELECT * FROM action.hold_request WHERE target = source_part.id AND hold_type = 'P' LOOP
-
- UPDATE action.hold_request
- SET target = target_part.id
- WHERE id = hold.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- ... if not ...
- ELSE
- -- ... just move the part to the target record
- UPDATE biblio.monograph_part
- SET record = target_record
- WHERE id = source_part.id;
- END IF;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- Find multi_home items attached to the source ...
- FOR multi_home IN SELECT * FROM biblio.peer_bib_copy_map WHERE peer_record = source_record LOOP
- -- ... and move them to the target record
- UPDATE biblio.peer_bib_copy_map
- SET peer_record = target_record
- WHERE id = multi_home.id;
-
- moved_objects := moved_objects + 1;
- END LOOP;
-
- -- And delete mappings where the item's home bib was merged with the peer bib
- DELETE FROM biblio.peer_bib_copy_map WHERE peer_record = (
- SELECT (SELECT record FROM asset.call_number WHERE id = call_number)
- FROM asset.copy WHERE id = target_copy
- );
-
- -- Finally, "delete" the source record
- DELETE FROM biblio.record_entry WHERE id = source_record;
-
- -- That's all, folks!
- RETURN moved_objects;
-END;
-$func$ LANGUAGE plpgsql;
-
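-- The rebuilt merge function is called with (target, source) bib ids and
-- returns the number of objects moved. Left commented out because it really
-- moves holdings and deletes the source record; the ids are placeholders:
-- SELECT asset.merge_record_assets(100, 101);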
-
-SELECT evergreen.upgrade_deps_block_check('0764', :eg_version);
-
-UPDATE config.z3950_source
- SET host = 'lx2.loc.gov', port = 210, db = 'LCDB'
- WHERE name = 'loc'
- AND host = 'z3950.loc.gov'
- AND port = 7090
- AND db = 'Voyager';
-
-UPDATE config.z3950_attr
- SET format = 6
- WHERE source = 'loc'
- AND name = 'lccn'
- AND format = 1;
-
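-- Quick read-only check that the Library of Congress Z39.50 target now points
-- at the new host and database:
SELECT name, host, port, db FROM config.z3950_source WHERE name = 'loc';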
-
-
-SELECT evergreen.upgrade_deps_block_check('0770', :eg_version);
-
-CREATE OR REPLACE FUNCTION evergreen.get_barcodes(select_ou INT, type TEXT, in_barcode TEXT) RETURNS SETOF evergreen.barcode_set AS $$
-DECLARE
- cur_barcode TEXT;
- barcode_len INT;
- completion_len INT;
- asset_barcodes TEXT[];
- actor_barcodes TEXT[];
- do_asset BOOL = false;
- do_serial BOOL = false;
- do_booking BOOL = false;
- do_actor BOOL = false;
- completion_set config.barcode_completion%ROWTYPE;
-BEGIN
-
- IF position('asset' in type) > 0 THEN
- do_asset = true;
- END IF;
- IF position('serial' in type) > 0 THEN
- do_serial = true;
- END IF;
- IF position('booking' in type) > 0 THEN
- do_booking = true;
- END IF;
- IF do_asset OR do_serial OR do_booking THEN
- asset_barcodes = asset_barcodes || in_barcode;
- END IF;
- IF position('actor' in type) > 0 THEN
- do_actor = true;
- actor_barcodes = actor_barcodes || in_barcode;
- END IF;
-
- barcode_len := length(in_barcode);
-
- FOR completion_set IN
- SELECT * FROM config.barcode_completion
- WHERE active
- AND org_unit IN (SELECT aou.id FROM actor.org_unit_ancestors(select_ou) aou)
- LOOP
- IF completion_set.prefix IS NULL THEN
- completion_set.prefix := '';
- END IF;
- IF completion_set.suffix IS NULL THEN
- completion_set.suffix := '';
- END IF;
- IF completion_set.length = 0 OR completion_set.padding IS NULL OR length(completion_set.padding) = 0 THEN
- cur_barcode = completion_set.prefix || in_barcode || completion_set.suffix;
- ELSE
- completion_len = completion_set.length - length(completion_set.prefix) - length(completion_set.suffix);
- IF completion_len >= barcode_len THEN
- IF completion_set.padding_end THEN
- cur_barcode = rpad(in_barcode, completion_len, completion_set.padding);
- ELSE
- cur_barcode = lpad(in_barcode, completion_len, completion_set.padding);
- END IF;
- cur_barcode = completion_set.prefix || cur_barcode || completion_set.suffix;
- END IF;
- END IF;
- IF completion_set.actor THEN
- actor_barcodes = actor_barcodes || cur_barcode;
- END IF;
- IF completion_set.asset THEN
- asset_barcodes = asset_barcodes || cur_barcode;
- END IF;
- END LOOP;
-
- IF do_asset AND do_serial THEN
- RETURN QUERY SELECT 'asset'::TEXT, id, barcode FROM ONLY asset.copy WHERE barcode = ANY(asset_barcodes) AND deleted = false;
- RETURN QUERY SELECT 'serial'::TEXT, id, barcode FROM serial.unit WHERE barcode = ANY(asset_barcodes) AND deleted = false;
- ELSIF do_asset THEN
- RETURN QUERY SELECT 'asset'::TEXT, id, barcode FROM asset.copy WHERE barcode = ANY(asset_barcodes) AND deleted = false;
- ELSIF do_serial THEN
- RETURN QUERY SELECT 'serial'::TEXT, id, barcode FROM serial.unit WHERE barcode = ANY(asset_barcodes) AND deleted = false;
- END IF;
- IF do_booking THEN
- RETURN QUERY SELECT 'booking'::TEXT, id::BIGINT, barcode FROM booking.resource WHERE barcode = ANY(asset_barcodes);
- END IF;
- IF do_actor THEN
- RETURN QUERY SELECT 'actor'::TEXT, c.usr::BIGINT, c.barcode FROM actor.card c JOIN actor.usr u ON c.usr = u.id WHERE
- ((c.barcode = ANY(actor_barcodes) AND c.active) OR c.barcode = in_barcode) AND NOT u.deleted ORDER BY usr;
- END IF;
- RETURN;
-END;
-$$ LANGUAGE plpgsql;
-
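-- Illustrative, read-only lookup with the completion-aware function above:
-- search item and patron barcodes as seen from org unit 1 (the org unit and
-- barcode are example values).
SELECT * FROM evergreen.get_barcodes(1, 'asset,actor', '31234000123456');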
--- Evergreen DB patch 0783.schema.enforce_use_id_for_tcn.sql
---
--- Sets the TCN value in the biblio.record_entry row to bib ID,
--- if the appropriate setting is in place
---
-
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0783', :eg_version);
-
--- FIXME: add/check SQL statements to perform the upgrade
-CREATE OR REPLACE FUNCTION evergreen.maintain_901 () RETURNS TRIGGER AS $func$
-use strict;
-use MARC::Record;
-use MARC::File::XML (BinaryEncoding => 'UTF-8');
-use MARC::Charset;
-use Encode;
-use Unicode::Normalize;
-
-MARC::Charset->assume_unicode(1);
-
-my $schema = $_TD->{table_schema};
-my $marc = MARC::Record->new_from_xml($_TD->{new}{marc});
-
-my @old901s = $marc->field('901');
-$marc->delete_fields(@old901s);
-
-if ($schema eq 'biblio') {
- my $tcn_value = $_TD->{new}{tcn_value};
-
- # Set TCN value to record ID?
- my $id_as_tcn = spi_exec_query("
- SELECT enabled
- FROM config.global_flag
- WHERE name = 'cat.bib.use_id_for_tcn'
- ");
- if (($id_as_tcn->{processed}) && $id_as_tcn->{rows}[0]->{enabled} eq 't') {
- $tcn_value = $_TD->{new}{id};
- $_TD->{new}{tcn_value} = $tcn_value;
- }
-
- my $new_901 = MARC::Field->new("901", " ", " ",
- "a" => $tcn_value,
- "b" => $_TD->{new}{tcn_source},
- "c" => $_TD->{new}{id},
- "t" => $schema
- );
-
- if ($_TD->{new}{owner}) {
- $new_901->add_subfields("o" => $_TD->{new}{owner});
- }
-
- if ($_TD->{new}{share_depth}) {
- $new_901->add_subfields("d" => $_TD->{new}{share_depth});
- }
-
- $marc->append_fields($new_901);
-} elsif ($schema eq 'authority') {
- my $new_901 = MARC::Field->new("901", " ", " ",
- "c" => $_TD->{new}{id},
- "t" => $schema,
- );
- $marc->append_fields($new_901);
-} elsif ($schema eq 'serial') {
- my $new_901 = MARC::Field->new("901", " ", " ",
- "c" => $_TD->{new}{id},
- "t" => $schema,
- "o" => $_TD->{new}{owning_lib},
- );
-
- if ($_TD->{new}{record}) {
- $new_901->add_subfields("r" => $_TD->{new}{record});
- }
-
- $marc->append_fields($new_901);
-} else {
- my $new_901 = MARC::Field->new("901", " ", " ",
- "c" => $_TD->{new}{id},
- "t" => $schema,
- );
- $marc->append_fields($new_901);
-}
-
-my $xml = $marc->as_xml_record();
-$xml =~ s/\n//sgo;
-$xml =~ s/^<\?xml.+\?\s*>//go;
-$xml =~ s/>\s+</></go;
-$xml =~ s/\p{Cc}//go;
-
-# Embed a version of OpenILS::Application::AppUtils->entityize()
-# to avoid having to set PERL5LIB for PostgreSQL as well
-
-# If we are going to convert non-ASCII characters to XML entities,
-# we had better be dealing with a UTF8 string to begin with
-$xml = decode_utf8($xml);
-
-$xml = NFC($xml);
-
-# Convert raw ampersands to entities
-$xml =~ s/&(?!\S+;)/&amp;/gso;
-
-# Convert Unicode characters to entities
-$xml =~ s/([\x{0080}-\x{fffd}])/sprintf('&#x%X;',ord($1))/sgoe;
-
-$xml =~ s/[\x00-\x1f]//go;
-$_TD->{new}{marc} = $xml;
-
-return "MODIFY";
-$func$ LANGUAGE PLPERLU;
-
-
COMMIT;
\set eg_version '''2.2.8'''
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.2.8', :eg_version);
--- Evergreen DB patch XXXX.function.axis_authority_tags_refs_aggregate.sql
---
-
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0784', :eg_version);
-
-CREATE OR REPLACE FUNCTION authority.axis_authority_tags_refs(a TEXT) RETURNS INT[] AS $$
- SELECT ARRAY_AGG(y) from (
- SELECT unnest(ARRAY_CAT(
- ARRAY[a.field],
- (SELECT ARRAY_ACCUM(x.id) FROM authority.control_set_authority_field x WHERE x.main_entry = a.field)
- )) y
- FROM authority.browse_axis_authority_field_map a
- WHERE axis = $1) x;
-$$ LANGUAGE SQL;
-
-CREATE OR REPLACE FUNCTION authority.btag_authority_tags_refs(btag TEXT) RETURNS INT[] AS $$
- SELECT ARRAY_AGG(y) from (
- SELECT unnest(ARRAY_CAT(
- ARRAY[a.authority_field],
- (SELECT ARRAY_ACCUM(x.id) FROM authority.control_set_authority_field x WHERE x.main_entry = a.authority_field)
- )) y
- FROM authority.control_set_bib_field a
- WHERE a.tag = $1) x
-$$ LANGUAGE SQL;
-
-CREATE OR REPLACE FUNCTION authority.atag_authority_tags_refs(atag TEXT) RETURNS INT[] AS $$
- SELECT ARRAY_AGG(y) from (
- SELECT unnest(ARRAY_CAT(
- ARRAY[a.id],
- (SELECT ARRAY_ACCUM(x.id) FROM authority.control_set_authority_field x WHERE x.main_entry = a.id)
- )) y
- FROM authority.control_set_authority_field a
- WHERE a.tag = $1) x
-$$ LANGUAGE SQL;
-
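-- Illustrative, read-only use of the aggregate-based rewrites above: gather
-- the authority tags (plus their see-also references) for one browse axis
-- ('subject' is an example axis name).
SELECT authority.axis_authority_tags_refs('subject');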
-
-
-INSERT INTO config.upgrade_log (version) VALUES ('0787');
-
-CREATE OR REPLACE FUNCTION authority.normalize_heading( marcxml TEXT, no_thesaurus BOOL ) RETURNS TEXT AS $func$
-DECLARE
- acsaf authority.control_set_authority_field%ROWTYPE;
- tag_used TEXT;
- nfi_used TEXT;
- sf TEXT;
- thes_code TEXT;
- cset INT;
- heading_text TEXT;
- tmp_text TEXT;
- first_sf BOOL;
- auth_id INT DEFAULT COALESCE(NULLIF(oils_xpath_string('//*[@tag="901"]/*[local-name()="subfield" and @code="c"]', marcxml), ''), '0')::INT;
-BEGIN
- SELECT control_set INTO cset FROM authority.record_entry WHERE id = auth_id;
-
- IF cset IS NULL THEN
- SELECT control_set INTO cset
- FROM authority.control_set_authority_field
- WHERE tag IN ( SELECT UNNEST(XPATH('//*[starts-with(@tag,"1")]/@tag',marcxml::XML)::TEXT[]))
- LIMIT 1;
- END IF;
-
- thes_code := vandelay.marc21_extract_fixed_field(marcxml,'Subj');
- IF thes_code IS NULL THEN
- thes_code := '|';
- ELSIF thes_code = 'z' THEN
- thes_code := COALESCE( oils_xpath_string('//*[@tag="040"]/*[@code="f"][1]', marcxml), '' );
- END IF;
-
- heading_text := '';
- FOR acsaf IN SELECT * FROM authority.control_set_authority_field WHERE control_set = cset AND main_entry IS NULL LOOP
- tag_used := acsaf.tag;
- nfi_used := acsaf.nfi;
- first_sf := TRUE;
- FOR sf IN SELECT * FROM regexp_split_to_table(acsaf.sf_list,'') LOOP
- tmp_text := oils_xpath_string('//*[@tag="'||tag_used||'"]/*[@code="'||sf||'"]', marcxml);
-
- IF first_sf AND tmp_text IS NOT NULL AND nfi_used IS NOT NULL THEN
-
- tmp_text := SUBSTRING(
- tmp_text FROM
- COALESCE(
- NULLIF(
- REGEXP_REPLACE(
- oils_xpath_string('//*[@tag="'||tag_used||'"]/@ind'||nfi_used, marcxml),
- $$\D+$$,
- '',
- 'g'
- ),
- ''
- )::INT,
- 0
- ) + 1
- );
-
- END IF;
-
- first_sf := FALSE;
-
- IF tmp_text IS NOT NULL AND tmp_text <> '' THEN
- heading_text := heading_text || E'\u2021' || sf || ' ' || tmp_text;
- END IF;
- END LOOP;
- EXIT WHEN heading_text <> '';
- END LOOP;
-
- IF heading_text <> '' THEN
- IF no_thesaurus IS TRUE THEN
- heading_text := tag_used || ' ' || public.naco_normalize(heading_text);
- ELSE
- heading_text := tag_used || '_' || COALESCE(nfi_used,'-') || '_' || thes_code || ' ' || public.naco_normalize(heading_text);
- END IF;
- ELSE
- heading_text := 'NOHEADING_' || thes_code || ' ' || MD5(marcxml);
- END IF;
- RETURN heading_text;
-END;
-$func$ LANGUAGE PLPGSQL IMMUTABLE;
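-- Illustrative, read-only check of the updated normalizer against an existing
-- authority record (the record id is an example):
SELECT authority.normalize_heading(are.marc, FALSE)
  FROM authority.record_entry are
 WHERE are.id = 1;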
COMMIT;
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.2.9', :eg_version);
-SELECT evergreen.upgrade_deps_block_check('0788', :eg_version);
-
--- New view including 264 as a potential tag for publisher and pubdate
-CREATE OR REPLACE VIEW reporter.old_super_simple_record AS
-SELECT r.id,
- r.fingerprint,
- r.quality,
- r.tcn_source,
- r.tcn_value,
- FIRST(title.value) AS title,
- FIRST(author.value) AS author,
- ARRAY_TO_STRING(ARRAY_ACCUM( DISTINCT publisher.value), ', ') AS publisher,
- ARRAY_TO_STRING(ARRAY_ACCUM( DISTINCT SUBSTRING(pubdate.value FROM $$\d+$$) ), ', ') AS pubdate,
- ARRAY_ACCUM( DISTINCT REPLACE(SUBSTRING(isbn.value FROM $$^\S+$$), '-', '') ) AS isbn,
- ARRAY_ACCUM( DISTINCT REGEXP_REPLACE(issn.value, E'^\\S*(\\d{4})[-\\s](\\d{3,4}x?)', E'\\1 \\2') ) AS issn
- FROM biblio.record_entry r
- LEFT JOIN metabib.full_rec title ON (r.id = title.record AND title.tag = '245' AND title.subfield = 'a')
- LEFT JOIN metabib.full_rec author ON (r.id = author.record AND author.tag IN ('100','110','111') AND author.subfield = 'a')
- LEFT JOIN metabib.full_rec publisher ON (r.id = publisher.record AND (publisher.tag = '260' OR (publisher.tag = '264' AND publisher.ind2 = '1')) AND publisher.subfield = 'b')
- LEFT JOIN metabib.full_rec pubdate ON (r.id = pubdate.record AND (pubdate.tag = '260' OR (pubdate.tag = '264' AND pubdate.ind2 = '1')) AND pubdate.subfield = 'c')
- LEFT JOIN metabib.full_rec isbn ON (r.id = isbn.record AND isbn.tag IN ('024', '020') AND isbn.subfield IN ('a','z'))
- LEFT JOIN metabib.full_rec issn ON (r.id = issn.record AND issn.tag = '022' AND issn.subfield = 'a')
- GROUP BY 1,2,3,4,5;
-
--- Update reporter.materialized_simple_record with new 264-based values for publisher and pubdate
-DELETE FROM reporter.materialized_simple_record WHERE id IN (
- SELECT DISTINCT record FROM metabib.full_rec WHERE tag = '264' AND subfield IN ('b', 'c')
-);
-
-INSERT INTO reporter.materialized_simple_record
- SELECT DISTINCT rossr.* FROM reporter.old_super_simple_record rossr INNER JOIN metabib.full_rec mfr ON mfr.record = rossr.id
- WHERE mfr.tag = '264' AND mfr.subfield IN ('b', 'c')
-;
-
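-- Illustrative, read-only spot check that 264-based publisher/pubdate values
-- now show up in the view (uses the same 264 criterion as the DELETE/INSERT
-- above, limited to a handful of records):
SELECT id, publisher, pubdate
  FROM reporter.old_super_simple_record
 WHERE id IN (
         SELECT record FROM metabib.full_rec
          WHERE tag = '264' AND subfield IN ('b','c') LIMIT 5
       );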
COMMIT;
echo "Starting update of field_entry values. This will take a while..."
date
-psql -c "UPDATE metabib.identifier_field_entry set value = value;" &
-psql -c "UPDATE metabib.title_field_entry set value = value;" &
-psql -c "UPDATE metabib.author_field_entry set value = value;" &
-psql -c "UPDATE metabib.subject_field_entry set value = value;" &
-psql -c "UPDATE metabib.keyword_field_entry set value = value;" &
-psql -c "UPDATE metabib.series_field_entry set value = value;" &
+psql -U tadl -h tadl-dbrw -c "UPDATE metabib.identifier_field_entry set value = value;" &
+psql -U tadl -h tadl-dbrw -c "UPDATE metabib.title_field_entry set value = value;" &
+psql -U tadl -h tadl-dbrw -c "UPDATE metabib.author_field_entry set value = value;" &
+psql -U tadl -h tadl-dbrw -c "UPDATE metabib.subject_field_entry set value = value;" &
+psql -U tadl -h tadl-dbrw -c "UPDATE metabib.keyword_field_entry set value = value;" &
+psql -U tadl -h tadl-dbrw -c "UPDATE metabib.series_field_entry set value = value;" &
wait
date
echo "Starting update of combined field_entry values. This will also take a while..."
-psql -c "SELECT count(metabib.update_combined_index_vectors(id)) FROM biblio.record_entry WHERE NOT deleted;" &
+psql -U tadl -h tadl-dbrw -c "SELECT count(metabib.update_combined_index_vectors(id)) FROM biblio.record_entry WHERE NOT deleted;" &
echo "Starting creation of indexes from 0782..."
echo "There may be deadlock errors; if they happen, this script will"
echo "rebuild the affected invalid indexes at the end."
-psql -c "CREATE INDEX CONCURRENTLY usr_activity_usr_idx on actor.usr_activity (usr);" &
-psql -c "CREATE INDEX CONCURRENTLY hold_request_open_idx on action.hold_request (id) where cancel_time IS NULL AND fulfillment_time IS NULL;" &
-psql -c "CREATE INDEX CONCURRENTLY cp_available_by_circ_lib_idx on asset.copy (circ_lib) where status IN (0,7);" &
-psql -c "CREATE INDEX CONCURRENTLY edi_message_account_status_idx on acq.edi_message (account,status);" &
-psql -c "CREATE INDEX CONCURRENTLY atev_def_state on action_trigger.event (event_def,state);" &
-psql -c "CREATE INDEX CONCURRENTLY hold_transit_copy_hold_idx on action.hold_transit_copy (hold);" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY usr_activity_usr_idx on actor.usr_activity (usr);" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY hold_request_open_idx on action.hold_request (id) where cancel_time IS NULL AND fulfillment_time IS NULL;" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY cp_available_by_circ_lib_idx on asset.copy (circ_lib) where status IN (0,7);" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY edi_message_account_status_idx on acq.edi_message (account,status);" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY atev_def_state on action_trigger.event (event_def,state);" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY hold_transit_copy_hold_idx on action.hold_transit_copy (hold);" &
wait
echo "Starting creation of more indexes from 0782..."
-psql -c "CREATE INDEX CONCURRENTLY hold_request_current_copy_before_cap_idx on action.hold_request (current_copy) where capture_time IS NULL AND cancel_time IS NULL;" &
-psql -c "CREATE INDEX CONCURRENTLY edi_message_po_idx on acq.edi_message (purchase_order);" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY hold_request_current_copy_before_cap_idx on action.hold_request (current_copy) where capture_time IS NULL AND cancel_time IS NULL;" &
+psql -U tadl -h tadl-dbrw -c "CREATE INDEX CONCURRENTLY edi_message_po_idx on acq.edi_message (purchase_order);" &
wait
date
rebuildscript=`mktemp`
-psql -A -t -c "SELECT 'DROP INDEX ' || n.nspname || '.' || i.relname || ';' \
+psql -U tadl -h tadl-dbrw -A -t -c "SELECT 'DROP INDEX ' || n.nspname || '.' || i.relname || ';' \
FROM pg_index x \
JOIN pg_class c ON c.oid = x.indrelid \
JOIN pg_class i ON i.oid = x.indexrelid \
JOIN pg_namespace n ON n.oid = c.relnamespace \
WHERE n.nspname IN ('action', 'action_trigger', 'actor', 'acq', 'asset') \
AND NOT x.indisvalid;" > $rebuildscript
-psql -A -t -c "SELECT pg_get_indexdef(i.oid) || ';' \
+psql -U tadl -h tadl-dbrw -A -t -c "SELECT pg_get_indexdef(i.oid) || ';' \
FROM pg_index x \
JOIN pg_class c ON c.oid = x.indrelid \
JOIN pg_class i ON i.oid = x.indexrelid \
JOIN pg_namespace n ON n.oid = c.relnamespace \
WHERE n.nspname IN ('action', 'action_trigger', 'actor', 'acq', 'asset') \
AND NOT x.indisvalid;" >> $rebuildscript
-psql -a -f $rebuildscript
+psql -U tadl -h tadl-dbrw -a -f $rebuildscript
echo "Combined field_entry values and index creation complete"
date
ALTER TABLE metabib.subject_field_entry ALTER COLUMN index_vector TYPE pg_catalog.tsvector;
ALTER TABLE metabib.title_field_entry ALTER COLUMN index_vector TYPE pg_catalog.tsvector;
+COMMIT;
-- Make sure that tsearch2 exists as an extension (for a sufficiently
-- old Evergreen database, it might still be an unpackaged contrib).
CREATE EXTENSION IF NOT EXISTS tsearch2 SCHEMA public FROM unpackaged;
-- Halfway there! Goodbye tsearch2 extension!
DROP EXTENSION tsearch2;
+BEGIN;
-- Next up, re-creating all of the stuff we just dropped.
-- Indexes! Note to whomever: Do we even need these anymore?
SELECT evergreen.upgrade_deps_block_check('0756', :eg_version);
-- Drop some lingering old functions in search schema
+DROP FUNCTION IF EXISTS search.staged_fts(integer,integer,text,integer[],text[],text[],text[],text[],text[],text[],text,real,text,boolean,boolean,boolean,integer,integer,integer);
+DROP FUNCTION IF EXISTS search.staged_fts(integer,integer,text,integer[],integer[],text[],text[],text[],text[],text[],text[],text,real,text,boolean,boolean,boolean,integer,integer,integer);
DROP FUNCTION IF EXISTS search.staged_fts(INT,INT,TEXT,INT[],INT[],TEXT[],TEXT[],TEXT[],TEXT[],TEXT[],TEXT[],TEXT[],TEXT,TEXT,TEXT,TEXT[],TEXT,REAL,TEXT,BOOL,BOOL,BOOL,INT,INT,INT);
DROP FUNCTION IF EXISTS search.parse_search_args(TEXT);
DROP FUNCTION IF EXISTS search.explode_array(ANYARRAY);
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.3.11', :eg_version);
--- Remove [ and ] characters from seriestitle.
--- Those characters don't play well when searching.
-
-SELECT evergreen.upgrade_deps_block_check('0820', :eg_version); -- Callender
-
-INSERT INTO config.metabib_field_index_norm_map (field,norm,params, pos)
- SELECT m.id,
- i.id,
- $$["]",""]$$,
- '-1'
- FROM config.metabib_field m,
- config.index_normalizer i
- WHERE i.func IN ('replace')
- AND m.id IN (1);
-
-INSERT INTO config.metabib_field_index_norm_map (field,norm,params, pos)
- SELECT m.id,
- i.id,
- $$["[",""]$$,
- '-1'
- FROM config.metabib_field m,
- config.index_normalizer i
- WHERE i.func IN ('replace')
- AND m.id IN (1);
-
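-- Read-only look at the normalizer rows just added for the series title field
-- targeted above: each 'replace' entry strips one bracket character.
SELECT m.field, i.func, m.params, m.pos
  FROM config.metabib_field_index_norm_map m
  JOIN config.index_normalizer i ON (i.id = m.norm)
 WHERE m.field = 1 AND i.func = 'replace';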
-
-SELECT evergreen.upgrade_deps_block_check('0821', :eg_version);
-
-CREATE OR REPLACE FUNCTION metabib.reingest_metabib_field_entries( bib_id BIGINT, skip_facet BOOL DEFAULT FALSE, skip_browse BOOL DEFAULT FALSE, skip_search BOOL DEFAULT FALSE ) RETURNS VOID AS $func$
-DECLARE
- fclass RECORD;
- ind_data metabib.field_entry_template%ROWTYPE;
- mbe_row metabib.browse_entry%ROWTYPE;
- mbe_id BIGINT;
- mbe_txt TEXT;
-BEGIN
- PERFORM * FROM config.internal_flag WHERE name = 'ingest.assume_inserts_only' AND enabled;
- IF NOT FOUND THEN
- IF NOT skip_search THEN
- FOR fclass IN SELECT * FROM config.metabib_class LOOP
- -- RAISE NOTICE 'Emptying out %', fclass.name;
- EXECUTE $$DELETE FROM metabib.$$ || fclass.name || $$_field_entry WHERE source = $$ || bib_id;
- END LOOP;
- END IF;
- IF NOT skip_facet THEN
- DELETE FROM metabib.facet_entry WHERE source = bib_id;
- END IF;
- IF NOT skip_browse THEN
- DELETE FROM metabib.browse_entry_def_map WHERE source = bib_id;
- END IF;
- END IF;
-
- FOR ind_data IN SELECT * FROM biblio.extract_metabib_field_entry( bib_id ) LOOP
- IF ind_data.field < 0 THEN
- ind_data.field = -1 * ind_data.field;
- END IF;
-
- IF ind_data.facet_field AND NOT skip_facet THEN
- INSERT INTO metabib.facet_entry (field, source, value)
- VALUES (ind_data.field, ind_data.source, ind_data.value);
- END IF;
-
- IF ind_data.browse_field AND NOT skip_browse THEN
- -- A caveat about this SELECT: this should take care of replacing
- -- old mbe rows when data changes, but not if normalization (by
- -- which I mean specifically the output of
- -- evergreen.oils_tsearch2()) changes. It may or may not be
- -- expensive to add a comparison of index_vector to index_vector
- -- to the WHERE clause below.
- mbe_txt := metabib.browse_normalize(ind_data.value, ind_data.field);
- SELECT INTO mbe_row * FROM metabib.browse_entry WHERE value = mbe_txt;
- IF FOUND THEN
- mbe_id := mbe_row.id;
- ELSE
- INSERT INTO metabib.browse_entry (value) VALUES (mbe_txt);
- mbe_id := CURRVAL('metabib.browse_entry_id_seq'::REGCLASS);
- END IF;
-
- INSERT INTO metabib.browse_entry_def_map (entry, def, source)
- VALUES (mbe_id, ind_data.field, ind_data.source);
- END IF;
-
- IF ind_data.search_field AND NOT skip_search THEN
- EXECUTE $$
- INSERT INTO metabib.$$ || ind_data.field_class || $$_field_entry (field, source, value)
- VALUES ($$ ||
- quote_literal(ind_data.field) || $$, $$ ||
- quote_literal(ind_data.source) || $$, $$ ||
- quote_literal(ind_data.value) ||
- $$);$$;
- END IF;
-
- END LOOP;
-
- RETURN;
-END;
-$func$ LANGUAGE PLPGSQL;
-
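-- Illustrative call shape for the reworked reingest function above, skipping
-- the facet pass (the bib id is a placeholder); left commented out because
-- reingesting rewrites index rows:
-- SELECT metabib.reingest_metabib_field_entries(123, TRUE, FALSE, FALSE);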
-
--- Evergreen DB patch 0825.data.bre_format.sql
---
--- Fix some templates that loop over bibs to not have duplicated/run-on titles
---
-
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0825', :eg_version);
-
--- I think we shy away from modifying templates on existing systems, but this seems pretty safe...
-UPDATE
- action_trigger.event_definition
-SET
- template = replace(template,'[% FOR cbreb IN target %]','[% FOR cbreb IN target %][% title = '''' %]')
-WHERE
- id IN (31,32);
-
COMMIT;
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.3.7', :eg_version);
-SELECT evergreen.upgrade_deps_block_check('0788', :eg_version);
-
--- New view including 264 as a potential tag for publisher and pubdate
-CREATE OR REPLACE VIEW reporter.old_super_simple_record AS
-SELECT r.id,
- r.fingerprint,
- r.quality,
- r.tcn_source,
- r.tcn_value,
- FIRST(title.value) AS title,
- FIRST(author.value) AS author,
- ARRAY_TO_STRING(ARRAY_ACCUM( DISTINCT publisher.value), ', ') AS publisher,
- ARRAY_TO_STRING(ARRAY_ACCUM( DISTINCT SUBSTRING(pubdate.value FROM $$\d+$$) ), ', ') AS pubdate,
- ARRAY_ACCUM( DISTINCT REPLACE(SUBSTRING(isbn.value FROM $$^\S+$$), '-', '') ) AS isbn,
- ARRAY_ACCUM( DISTINCT REGEXP_REPLACE(issn.value, E'^\\S*(\\d{4})[-\\s](\\d{3,4}x?)', E'\\1 \\2') ) AS issn
- FROM biblio.record_entry r
- LEFT JOIN metabib.full_rec title ON (r.id = title.record AND title.tag = '245' AND title.subfield = 'a')
- LEFT JOIN metabib.full_rec author ON (r.id = author.record AND author.tag IN ('100','110','111') AND author.subfield = 'a')
- LEFT JOIN metabib.full_rec publisher ON (r.id = publisher.record AND (publisher.tag = '260' OR (publisher.tag = '264' AND publisher.ind2 = '1')) AND publisher.subfield = 'b')
- LEFT JOIN metabib.full_rec pubdate ON (r.id = pubdate.record AND (pubdate.tag = '260' OR (pubdate.tag = '264' AND pubdate.ind2 = '1')) AND pubdate.subfield = 'c')
- LEFT JOIN metabib.full_rec isbn ON (r.id = isbn.record AND isbn.tag IN ('024', '020') AND isbn.subfield IN ('a','z'))
- LEFT JOIN metabib.full_rec issn ON (r.id = issn.record AND issn.tag = '022' AND issn.subfield = 'a')
- GROUP BY 1,2,3,4,5;
-
--- Update reporter.materialized_simple_record with new 264-based values for publisher and pubdate
-DELETE FROM reporter.materialized_simple_record WHERE id IN (
- SELECT DISTINCT record FROM metabib.full_rec WHERE tag = '264' AND subfield IN ('b', 'c')
-);
-
-INSERT INTO reporter.materialized_simple_record
- SELECT DISTINCT rossr.* FROM reporter.old_super_simple_record rossr INNER JOIN metabib.full_rec mfr ON mfr.record = rossr.id
- WHERE mfr.tag = '264' AND mfr.subfield IN ('b', 'c')
-;
-
COMMIT;
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.3.9', :eg_version);
-SELECT evergreen.upgrade_deps_block_check('0803', :eg_version);
-
-UPDATE config.org_unit_setting_type
-SET description = oils_i18n_gettext('circ.holds.default_shelf_expire_interval',
- 'The amount of time an item will be held on the shelf before the hold expires. For example: "2 weeks" or "5 days"',
- 'coust', 'description')
-WHERE name = 'circ.holds.default_shelf_expire_interval';
-
-
-SELECT evergreen.upgrade_deps_block_check('0804', :eg_version);
-
-UPDATE config.coded_value_map
-SET value = oils_i18n_gettext('169', 'Gwich''in', 'ccvm', 'value')
-WHERE ctype = 'item_lang' AND code = 'gwi';
-
--- Evergreen DB patch XXXX.schema.usrname_index.sql
---
--- Create search index on actor.usr.usrname
---
-
-SELECT evergreen.upgrade_deps_block_check('0808', :eg_version);
-
-CREATE INDEX actor_usr_usrname_idx ON actor.usr (evergreen.lowercase(usrname));
-
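-- The expression index above only helps queries that filter on the same
-- expression; an illustrative lookup (the username is an example value):
SELECT id, usrname
  FROM actor.usr
 WHERE evergreen.lowercase(usrname) = evergreen.lowercase('some.patron');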
-
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0810', :eg_version);
-
-UPDATE authority.control_set_authority_field
- SET name = REGEXP_REPLACE(name, '^See Also', 'See From')
- WHERE tag LIKE '4__' AND control_set = 1;
-
-
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0811', :eg_version);
-
-DROP FUNCTION action.copy_related_hold_stats(integer);
-
-CREATE OR REPLACE FUNCTION action.copy_related_hold_stats(copy_id bigint)
- RETURNS action.hold_stats AS
-$BODY$
-DECLARE
- output action.hold_stats%ROWTYPE;
- hold_count INT := 0;
- copy_count INT := 0;
- available_count INT := 0;
- hold_map_data RECORD;
-BEGIN
-
- output.hold_count := 0;
- output.copy_count := 0;
- output.available_count := 0;
-
- SELECT COUNT( DISTINCT m.hold ) INTO hold_count
- FROM action.hold_copy_map m
- JOIN action.hold_request h ON (m.hold = h.id)
- WHERE m.target_copy = copy_id
- AND NOT h.frozen;
-
- output.hold_count := hold_count;
-
- IF output.hold_count > 0 THEN
- FOR hold_map_data IN
- SELECT DISTINCT m.target_copy,
- acp.status
- FROM action.hold_copy_map m
- JOIN asset.copy acp ON (m.target_copy = acp.id)
- JOIN action.hold_request h ON (m.hold = h.id)
- WHERE m.hold IN ( SELECT DISTINCT hold FROM action.hold_copy_map WHERE target_copy = copy_id ) AND NOT h.frozen
- LOOP
- output.copy_count := output.copy_count + 1;
- IF hold_map_data.status IN (0,7,12) THEN
- output.available_count := output.available_count + 1;
- END IF;
- END LOOP;
- output.total_copy_ratio = output.copy_count::FLOAT / output.hold_count::FLOAT;
- output.available_copy_ratio = output.available_count::FLOAT / output.hold_count::FLOAT;
-
- END IF;
-
- RETURN output;
-
-END;
-$BODY$
- LANGUAGE plpgsql VOLATILE
- COST 100;
-
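-- Illustrative, read-only call of the rebuilt ratio function (the copy id is
-- an example); the extra columns expose copy-to-hold ratios for hold targeting.
SELECT hold_count, copy_count, available_count,
       total_copy_ratio, available_copy_ratio
  FROM action.copy_related_hold_stats(1);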
-
COMMIT;
BEGIN;
INSERT INTO config.upgrade_log (version, applied_to) VALUES ('2.3.10', :eg_version);
-SELECT evergreen.upgrade_deps_block_check('0818', :eg_version);
-
-INSERT INTO config.org_unit_setting_type ( name, grp, label, description, datatype ) VALUES (
- 'circ.patron_edit.duplicate_patron_check_depth', 'circ',
- oils_i18n_gettext(
- 'circ.patron_edit.duplicate_patron_check_depth',
- 'Specify search depth for the duplicate patron check in the patron editor',
- 'coust',
- 'label'),
- oils_i18n_gettext(
- 'circ.patron_edit.duplicate_patron_check_depth',
- 'When using the patron registration page, the duplicate patron check will use the configured depth to scope the search for duplicate patrons.',
- 'coust',
- 'description'),
- 'integer')
-;
-
-
-
--- Evergreen DB patch 0819.schema.acn_dewey_normalizer.sql
---
--- Fixes Dewey call number sorting (per LP# 1150939)
---
-
--- check whether patch can be applied
-SELECT evergreen.upgrade_deps_block_check('0819', :eg_version);
-
-CREATE OR REPLACE FUNCTION asset.label_normalizer_dewey(TEXT) RETURNS TEXT AS $func$
- # Derived from the Koha C4::ClassSortRoutine::Dewey module
- # Copyright (C) 2007 LibLime
- # Licensed under the GPL v2 or later
-
- use strict;
- use warnings;
-
- my $init = uc(shift);
- $init =~ s/^\s+//;
- $init =~ s/\s+$//;
- $init =~ s!/!!g;
- $init =~ s/^([\p{IsAlpha}]+)/$1 /;
- my @tokens = split /\.|\s+/, $init;
- my $digit_group_count = 0;
- my $first_digit_group_idx;
- for (my $i = 0; $i <= $#tokens; $i++) {
- if ($tokens[$i] =~ /^\d+$/) {
- $digit_group_count++;
- if ($digit_group_count == 1) {
- $first_digit_group_idx = $i;
- }
- if (2 == $digit_group_count) {
- $tokens[$i] = sprintf("%-15.15s", $tokens[$i]);
- $tokens[$i] =~ tr/ /0/;
- }
- }
- }
- # Pad the first digit_group if there was only one
- if (1 == $digit_group_count) {
- $tokens[$first_digit_group_idx] .= '_000000000000000'
- }
- my $key = join("_", @tokens);
- $key =~ s/[^\p{IsAlnum}_]//g;
-
- return $key;
-
-$func$ LANGUAGE PLPERLU;
-
--- regenerate sort keys for any dewey call numbers
-UPDATE asset.call_number SET id = id WHERE label_class = 2;
-
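-- Illustrative, read-only check of the corrected Dewey sort key generator
-- (the call numbers are made-up examples):
SELECT asset.label_normalizer_dewey('823.91 DIC') AS key_a,
       asset.label_normalizer_dewey('823.0912 DIC') AS key_b;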
COMMIT;