From 7529fba9165546a95a3ad80dc2e63a8e6309c8bd Mon Sep 17 00:00:00 2001 From: Dan Scott Date: Mon, 6 Jun 2011 11:33:57 -0400 Subject: [PATCH] Add Conifer's laundry list of tools and scripts Again, in the interest of expediency, get the Conifer tools into a branch, we can factor them out further later. Signed-off-by: Dan Scott --- tools/autobuild.pl | 139 +++ tools/config/config_z3950.sql | 210 ++++ tools/daily-scripts/OSUL-patrons-lang-prefs.sql | 10 + tools/daily-scripts/circ_date_to_expire_date.pl | 76 ++ tools/daily-scripts/delete_ill_books.pl | 99 ++ tools/daily-scripts/end_of_the_day.pl | 104 ++ tools/daily-scripts/reingest_uningested.pl | 72 ++ tools/daily-scripts/test_eg_services.pl | 48 + .../update_indexes_for_ampersand_equivalency.sql | 16 + tools/ebooks/prep_ebook_records.py | 482 +++++++++ tools/migration-scripts/circ_rules.sql | 29 + tools/migration-scripts/create_test_users.pl | 68 ++ tools/migration-scripts/demousers.sql | 1124 ++++++++++++++++++++ tools/migration-scripts/fixURIs.pl | 110 ++ tools/migration-scripts/fix_bad_marcxml.pl | 65 ++ tools/migration-scripts/fix_windsors_diacritics.pl | 166 +++ tools/migration-scripts/generate_copies.sql | 200 ++++ tools/migration-scripts/lul_import.pl | 94 ++ tools/migration-scripts/org_units.sql | 79 ++ tools/migration-scripts/patron_groups.sql | 58 + tools/migration-scripts/windsor_import.pl | 94 ++ .../migration-scripts/windsor_patron_load_base.csv | 1 + tools/migration-scripts/windsor_patrons.sql | 260 +++++ tools/patch_conifer.sh | 171 +++ tools/patron-load/lu_student_data.pl | 166 +++ tools/sip_user.sql | 29 + 26 files changed, 3970 insertions(+) create mode 100644 tools/autobuild.pl create mode 100644 tools/config/config_z3950.sql create mode 100644 tools/daily-scripts/OSUL-patrons-lang-prefs.sql create mode 100644 tools/daily-scripts/circ_date_to_expire_date.pl create mode 100644 tools/daily-scripts/delete_ill_books.pl create mode 100644 tools/daily-scripts/end_of_the_day.pl create mode 100644 tools/daily-scripts/reingest_uningested.pl create mode 100644 tools/daily-scripts/test_eg_services.pl create mode 100644 tools/daily-scripts/update_indexes_for_ampersand_equivalency.sql create mode 100644 tools/ebooks/prep_ebook_records.py create mode 100644 tools/migration-scripts/circ_rules.sql create mode 100644 tools/migration-scripts/create_test_users.pl create mode 100644 tools/migration-scripts/demousers.sql create mode 100644 tools/migration-scripts/fixURIs.pl create mode 100644 tools/migration-scripts/fix_bad_marcxml.pl create mode 100644 tools/migration-scripts/fix_windsors_diacritics.pl create mode 100644 tools/migration-scripts/generate_copies.sql create mode 100644 tools/migration-scripts/lul_import.pl create mode 100644 tools/migration-scripts/org_units.sql create mode 100644 tools/migration-scripts/patron_groups.sql create mode 100644 tools/migration-scripts/windsor_import.pl create mode 100644 tools/migration-scripts/windsor_patron_load_base.csv create mode 100644 tools/migration-scripts/windsor_patrons.sql create mode 100644 tools/patch_conifer.sh create mode 100644 tools/patron-load/lu_student_data.pl create mode 100644 tools/sip_user.sql diff --git a/tools/autobuild.pl b/tools/autobuild.pl new file mode 100644 index 0000000000..71e98b6035 --- /dev/null +++ b/tools/autobuild.pl @@ -0,0 +1,139 @@ +#!/usr/bin/perl +use strict; +use warnings; +use File::Spec; +use File::Path qw/make_path remove_tree/; + +=head1 Purpose + +This script is intended to automate the process of checking out current +versions of one or more 
branches (including trunk) from a Subversion +repository, run through the configure and make processes to ensure that +the basic build is successful, and then run additional steps such as +creating the database schema or running available tests. + +It's a super-simple stupid script. + +=head2 Usage + +Pass the relative directories for the branches that you want to build as +arguments to the script; for example, if you want to build both the +rel_1_6_0 branch and trunk, issue the following command: + +perl autobuild.pl branches/rel_1_6_0 trunk + +The script will create timestamped log files for the output from the various +steps in the log directory. + +=head2 TODO + +Lots. I don't intend this to ever get to the level of a hudson continuous +integration server, though. + +=over + +=item * Start using Getopt::Long to avoid hard-coded variables + +=item * Highlight errors at the various steps + +=item * Flesh out the packaging step to generate tarballs + +=back + +=cut + +my $repo_base = 'svn://svn.open-ils.org/'; +my $repo_type = 'ILS/'; +my $repo = $repo_base . $repo_type; +my $work_dir = '/tmp'; +my $export_dir = '/tmp/export'; +my $log_dir = '/tmp'; +my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime(); +my $tstamp = ($year + 1900) . "-" . ($mon + 1) . "-$mday\_$hour-$min"; + +for my $release (@ARGV) { + my $export_dir = checkout($release); + configure($release, $export_dir); + package($release, $export_dir); + build($release, $export_dir); + test($release, $export_dir); +} + +=head2 sub checkout($release) + +Check to see if the local repo has already been created; if so, then just +update it. Otherwise, do a clean checkout. + +Then export the source to a clean export directory. + +Returns the export directory name + +=cut + +sub checkout { + my $release = shift; + my $source = $repo . $release; + my $export = File::Spec->catdir($export_dir, $release); + + my @log; + + chdir($work_dir); + if (-d $release) { + print "Release directory has already been created; just update\n"; + chdir($release); + @log = `svn up 2>&1`; + } else { + print "Check it out\n"; + @log = `svn co $source $release 2>&1`; + chdir($release); + } + + # Now export the code to a clean export directory + # First we create the complete path, then trim the base directory + # ("svn export" won't export to an existing directory) + make_path(File::Spec->catdir($export, '../')); + remove_tree($export); + print "Exporting the code\n"; + `svn export . 
$export 2>&1`; + + logit($release, 'svn', \@log); + + return $export; +} + +sub build { + my ($release, $export_dir) = @_; + chdir($export_dir); + my @log = `make 2>&1`; + logit($release, 'build', \@log); +} + +sub configure { + my ($release, $export_dir) = @_; + chdir($export_dir); + my @log = `./autogen.sh && ./configure --prefix=/openils --sysconf=/openils/conf 2>&1`; + logit($release, 'config', \@log); +} + +sub package { + my ($release, $export_dir) = @_; + chdir($export_dir); + + # Remove some files + # Generate changelog + chdir('..'); + # Create tarball + print "package stub\n"; +} + +sub test { + my ($release, $export_dir) = @_; + print "test stub\n"; +} + +sub logit { + my ($release, $type, $log) = @_; + open(LOGFH, '>', File::Spec->catfile($work_dir, $release, "$type\_$tstamp.log")); + print LOGFH @$log; + close(LOGFH); +} diff --git a/tools/config/config_z3950.sql b/tools/config/config_z3950.sql new file mode 100644 index 0000000000..ab1623f235 --- /dev/null +++ b/tools/config/config_z3950.sql @@ -0,0 +1,210 @@ +-- +-- PostgreSQL database dump +-- + +SET client_encoding = 'SQL_ASCII'; +SET standard_conforming_strings = off; +SET check_function_bodies = false; +SET client_min_messages = warning; +SET escape_string_warning = off; + +SET search_path = config, pg_catalog; + +-- +-- Name: z3950_attr_id_seq; Type: SEQUENCE SET; Schema: config; Owner: evergreen +-- + +SELECT pg_catalog.setval('z3950_attr_id_seq', 287, true); + + +-- +-- Data for Name: z3950_attr; Type: TABLE DATA; Schema: config; Owner: evergreen +-- + +COPY z3950_attr (id, source, name, label, code, format, truncation) FROM stdin; +110 McGill tcn Title Control Number 12 1 0 +111 McGill isbn ISBN 7 6 0 +112 McGill lccn LCCN 9 1 0 +113 McGill author Author 1003 6 0 +114 McGill title Title 4 6 0 +115 McGill issn ISSN 8 1 0 +116 McGill publisher Publisher 1018 6 0 +117 McGill pubdate Publication Date 31 1 0 +118 McGill item_type Item Type 1001 1 0 +119 UChicago tcn Title Control Number 12 1 0 +120 UChicago isbn ISBN 7 6 0 +121 UChicago lccn LCCN 9 1 0 +122 UChicago author Author 1003 6 0 +123 UChicago title Title 4 6 0 +124 UChicago issn ISSN 8 1 0 +125 UChicago publisher Publisher 1018 6 0 +126 UChicago pubdate Publication Date 31 1 0 +127 UChicago item_type Item Type 1001 1 0 +128 UMontréal tcn Title Control Number 12 1 0 +129 UMontréal isbn ISBN 7 6 0 +130 UMontréal lccn LCCN 9 1 0 +131 UMontréal author Author 1003 6 0 +132 UMontréal title Title 4 6 0 +133 UMontréal issn ISSN 8 1 0 +134 UMontréal publisher Publisher 1018 6 0 +135 UMontréal pubdate Publication Date 31 1 0 +136 UMontréal item_type Item Type 1001 1 0 +137 ULaval tcn Title Control Number 12 1 0 +138 ULaval isbn ISBN 7 6 0 +139 ULaval lccn LCCN 9 1 0 +140 ULaval author Author 1003 6 0 +141 ULaval title Title 4 6 0 +142 ULaval issn ISSN 8 1 0 +143 ULaval publisher Publisher 1018 6 0 +144 ULaval pubdate Publication Date 31 1 0 +145 ULaval item_type Item Type 1001 1 0 +146 UofT tcn Title Control Number 12 1 0 +147 UofT isbn ISBN 7 6 0 +148 UofT lccn LCCN 9 1 0 +149 UofT author Author 1003 6 0 +150 UofT title Title 4 6 0 +151 UofT issn ISSN 8 1 0 +152 UofT publisher Publisher 1018 6 0 +153 UofT pubdate Publication Date 31 1 0 +154 UofT item_type Item Type 1001 1 0 +174 Coutts isbn ISBN 7 6 0 +191 BAnQ tcn Title Control Number 12 1 0 +192 BAnQ isbn ISBN 7 6 0 +193 BAnQ lccn LCCN 9 1 0 +194 BAnQ author Author 1003 6 0 +195 BAnQ title Title 4 6 0 +196 BAnQ issn ISSN 8 1 0 +197 BAnQ publisher Publisher 1018 6 0 +198 BAnQ pubdate Publication Date 31 1 0 +199 BAnQ 
item_type Item Type 1001 1 0 +101 UWO tcn Title Control Number 12 1 0 +102 UWO author Author 1003 1 0 +103 UWO isbn ISBN 7 1 0 +104 UWO title Title 4 1 0 +105 UWO lccn LCCN 9 1 0 +106 UWO issn ISSN 8 1 0 +164 AMICUS tcn Title Control Number 12 1 1 +165 AMICUS isbn ISBN 7 6 1 +166 AMICUS lccn LCCN 9 1 1 +167 AMICUS author Author 1003 6 1 +168 AMICUS title Title 4 6 1 +169 AMICUS issn ISSN 8 1 1 +170 AMICUS publisher Publisher 1018 6 1 +171 AMICUS pubdate Publication Date 31 1 1 +172 AMICUS item_type Item Type 1001 1 1 +182 NLM tcn Title Control Number 12 1 1 +183 NLM isbn ISBN 7 6 1 +184 NLM lccn LCCN 9 1 1 +185 NLM author Author 1003 6 1 +186 NLM title Title 4 6 1 +187 NLM issn ISSN 8 1 1 +188 NLM publisher Publisher 1018 6 1 +189 NLM pubdate Publication Date 31 1 1 +190 NLM item_type Item Type 1001 1 1 +200 uottawa tcn Title Control Number 12 1 1 +201 uottawa isbn ISBN 7 1 1 +202 uottawa lccn LCCN 9 1 1 +203 uottawa author Author 1003 1 1 +204 uottawa title Title 4 1 1 +205 uottawa issn ISSN 8 1 1 +208 uottawa item_type Item Type 1001 1 1 +218 ualberta tcn Title Control Number 12 1 1 +219 ualberta isbn ISBN 7 6 1 +220 ualberta lccn LCCN 9 1 1 +221 ualberta author Author 1003 6 1 +222 ualberta title Title 4 6 1 +223 ualberta issn ISSN 8 1 1 +224 ualberta publisher Publisher 1018 6 1 +225 ualberta pubdate Publication Date 31 1 1 +226 ualberta item_type Item Type 1001 1 1 +227 yorku tcn Title Control Number 12 1 1 +228 yorku isbn ISBN 7 6 1 +229 yorku lccn LCCN 9 1 1 +230 yorku author Author 1003 6 1 +231 yorku title Title 4 6 1 +232 yorku issn ISSN 8 1 1 +234 yorku pubdate Publication Date 31 1 1 +236 umanitoba tcn Title Control Number 12 1 1 +237 umanitoba isbn ISBN 7 6 1 +238 umanitoba lccn LCCN 9 1 1 +239 umanitoba author Author 1003 6 1 +240 umanitoba title Title 4 6 1 +241 umanitoba issn ISSN 8 1 1 +242 umanitoba publisher Publisher 1018 6 1 +243 umanitoba pubdate Publication Date 31 1 1 +244 umanitoba item_type Item Type 1001 1 1 +245 umich tcn Title Control Number 12 1 1 +246 umich isbn ISBN 7 6 1 +247 umich lccn LCCN 9 1 1 +248 umich author Author 1003 6 1 +249 umich title Title 4 6 1 +250 umich issn ISSN 8 1 1 +252 umich pubdate Publication Date 31 1 1 +254 TUG tcn Title Control Number 12 1 1 +255 TUG isbn ISBN 7 6 1 +256 TUG lccn LCCN 9 1 1 +257 TUG author Author 1003 6 1 +258 TUG title Title 4 6 1 +259 TUG issn ISSN 8 1 1 +260 TUG publisher Publisher 1018 6 1 +261 TUG pubdate Publication Date 31 1 1 +262 TUG item_type Item Type 1001 1 1 +263 CarletonU tcn Title Control Number 12 1 1 +264 CarletonU isbn ISBN 7 1 1 +265 CarletonU lccn LCCN 9 1 1 +266 CarletonU author Author 1003 1 1 +267 CarletonU title Title 4 1 1 +268 CarletonU issn ISSN 8 1 1 +269 CarletonU item_type Item Type 1001 1 1 +270 UBC tcn Title Control Number 12 1 1 +271 UBC isbn ISBN 7 6 1 +272 UBC lccn LCCN 9 1 1 +273 UBC author Author 1003 6 1 +274 UBC title Title 4 6 1 +275 UBC issn ISSN 8 1 1 +276 UBC publisher Publisher 1018 6 1 +277 UBC pubdate Publication Date 31 1 1 +278 UBC item_type Item Type 1001 1 1 +279 IndianaU tcn Title Control Number 12 1 1 +280 IndianaU isbn ISBN 7 6 1 +281 IndianaU lccn LCCN 9 1 1 +282 IndianaU author Author 1003 6 1 +283 IndianaU title Title 4 6 1 +284 IndianaU issn ISSN 8 1 1 +285 IndianaU publisher Publisher 1018 6 1 +286 IndianaU pubdate Publication Date 31 1 1 +287 IndianaU item_type Item Type 1001 1 1 +\. 
+ + +-- +-- Data for Name: z3950_source; Type: TABLE DATA; Schema: config; Owner: evergreen +-- + +COPY z3950_source (name, label, host, port, db, record_format, transmission_format, auth) FROM stdin; +UofT University of Toronto sirsi.library.utoronto.ca 2200 UNICORN F usmarc f +UMontréal Université de Montréal atrium.bib.umontreal.ca 210 ADVANCE FI usmarc f +McGill McGill University aleph.mcgill.ca 210 MUSE FI usmarc f +UChicago University of Chicago ipac.lib.uchicago.edu 210 uofc FI usmarc f +NLM NLM tegument.nlm.nih.gov 7090 VOYAGER FI usmarc f +BAnQ Bibliothèque et Archives nationales du Québec www.biblinat.gouv.qc.ca 210 IRIS FI usmarc f +Coutts Coutts z3950.couttsinfo.com 210 USMARC F usmarc t +ULaval Université Laval ariane2.ulaval.ca 2200 unicorn FI usmarc f +UWO University of Western Ontario alpha.lib.uwo.ca 210 INNOPAC FI usmarc f +AMICUS AMICUS amicus.nlc-bnc.ca 210 NL F usmarc t +uottawa University of Ottawa orbis.uottawa.ca 210 INNOPAC F usmarc f +ualberta University of Alberta ualapp.library.ualberta.ca 2200 unicorn F usmarc f +yorku York University theta.library.yorku.ca 2200 unicorn F usmarc f +umanitoba University of Manitoba lrpapp.cc.umanitoba.ca 2200 unicorn B usmarc f +umich University of Michigan z3950.lib.umich.edu 210 miu01_pub F usmarc f +TUG Tri-University Group 129.97.129.194 7090 voyager FI usmarc f +CarletonU Carleton University catalogue.library.carleton.ca 210 INNOPAC FI usmarc f +UBC University of British Columbia portage.library.ubc.ca 7090 voyager FI usmarc f +IndianaU Indiana State University luis.indstate.edu 7090 voyager FI usmarc f +\. + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/tools/daily-scripts/OSUL-patrons-lang-prefs.sql b/tools/daily-scripts/OSUL-patrons-lang-prefs.sql new file mode 100644 index 0000000000..be7ca94ad7 --- /dev/null +++ b/tools/daily-scripts/OSUL-patrons-lang-prefs.sql @@ -0,0 +1,10 @@ +-- Ensure that we have a stat cat entry for language preference for OSUL users +-- NOTE: We should cut over to usr_settings at some point +INSERT INTO actor.stat_cat_entry_usr_map (stat_cat_entry, stat_cat, target_usr) + SELECT 'English', 2, au.id + FROM actor.usr au + WHERE au.id NOT IN ( + SELECT target_usr + FROM actor.stat_cat_entry_usr_map + ) AND home_ou = 103 +; diff --git a/tools/daily-scripts/circ_date_to_expire_date.pl b/tools/daily-scripts/circ_date_to_expire_date.pl new file mode 100644 index 0000000000..50492edb25 --- /dev/null +++ b/tools/daily-scripts/circ_date_to_expire_date.pl @@ -0,0 +1,76 @@ +#!/usr/bin/perl -w +use strict; +use warnings; + +# Evergreen sets due dates that are past the user's expiry date + +# Let's fix that after the fact, for now, by setting the due dates to the user's expiry date + +use DBI; +use Getopt::Long; +use OpenSRF::EX qw/:try/; +use OpenSRF::Utils qw/:daemon/; +use OpenSRF::System; +use OpenSRF::AppSession; +use OpenSRF::Utils::SettingsClient; + +my ($config, $set_due_time) = ('/openils/conf/opensrf_core.xml', 0); + +GetOptions( + "bootstrap=s" => \$config, + "set_due_time" => \$set_due_time, +); + +OpenSRF::System->bootstrap_client( config_file => $config ); + +my $sc = OpenSRF::Utils::SettingsClient->new; +my $db_driver = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => 'driver' ); +my $db_host = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'host' ); +my $db_port = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'port' ); +my $db_name = $sc->config_value( apps => 
'open-ils.storage' => app_settings => databases => database => 'db' ); +my $db_user = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'user' ); +my $db_pw = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'pw' ); + +my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + +my $dbh = DBI->connect($dsn,$db_user,$db_pw, {pg_enable_utf8 => 1, RaiseError => 1}); + +end_of_day($set_due_time); + +$dbh->disconnect; + +sub end_of_day { + my $set_due_time = shift; + + my $select_stmt = < au.expire_date + AND au.expire_date > NOW() + AND au.expire_date < NOW() + '2 years'::interval + AND au.home_ou IN (103, 110, 126) + ORDER BY au.expire_date +STMT + + my $update_stmt = < au.expire_date + AND au.expire_date > NOW() + AND au.expire_date < NOW() + '2 years'::interval + AND au.home_ou IN (103, 110, 126) + AND ac.checkin_time IS NULL +UPDATE + + + my $results = $dbh->selectall_arrayref($select_stmt); + print localtime() . " - found " . scalar(@$results) . " circulation transactions to update where due_date > expire_date\n"; + if ($set_due_time) { + my $stmt = $dbh->prepare($update_stmt); + my $updates = $stmt->execute(); + print "Updated $updates circulation transactions.\n"; + } +} + diff --git a/tools/daily-scripts/delete_ill_books.pl b/tools/daily-scripts/delete_ill_books.pl new file mode 100644 index 0000000000..f230433083 --- /dev/null +++ b/tools/daily-scripts/delete_ill_books.pl @@ -0,0 +1,99 @@ +#!/usr/bin/perl -w +use strict; + +use DBI; +use Getopt::Long; +use OpenSRF::EX qw/:try/; +use OpenSRF::Utils qw/:daemon/; +use OpenSRF::System; +use OpenSRF::AppSession; +use OpenSRF::Utils::SettingsClient; +use File::Find; + +my ($config) = ('/openils/conf/opensrf_core.xml'); + +GetOptions( + "bootstrap=s" => \$config, +); + +OpenSRF::System->bootstrap_client( config_file => $config ); + +my $sc = OpenSRF::Utils::SettingsClient->new; +my $db_driver = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => 'driver' ); +my $db_host = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'host' ); +my $db_port = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'port' ); +my $db_name = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'db' ); +my $db_user = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'user' ); +my $db_pw = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'pw' ); + +my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + +my $dbh = DBI->connect($dsn,$db_user,$db_pw, {pg_enable_utf8 => 1, RaiseError => 1}); + +delete_racer_callnumbers(); + +$dbh->disconnect; + +sub delete_racer_callnumbers { + my $select_stmt = <selectcol_arrayref($select_stmt); + print localtime() . " - found " . scalar(@$results) . 
" RACER book call numbers to delete:\n"; + if (scalar(@$results)) { + foreach (@$results) { + print "\t$_\n"; + } + my $stmt = $dbh->prepare($delete_stmt); + my $updates = $stmt->execute(); + } +} + diff --git a/tools/daily-scripts/end_of_the_day.pl b/tools/daily-scripts/end_of_the_day.pl new file mode 100644 index 0000000000..fafe56da9a --- /dev/null +++ b/tools/daily-scripts/end_of_the_day.pl @@ -0,0 +1,104 @@ +#!/usr/bin/perl -w + +# Sets the due time of items with a given loan period for a given library to 23:59:59 + +# This is a temporary workaround for Evergreen's assumption that the +# fine generating script will only run once a day, to avoid dinging a patron +# with an overdue charge at 48 hours + 5 minutes rather than at the end of the +# day that 48 hours falls on. + +# We also found that editing the due date for a given item sets the corresponding +# due time to 00:00 - which isn't great, as that means that it is due the minute +# the day starts. So, for now, we'll set all daily / weekly loans or those loans +# that are due exactly at midnight to being due at 23:59:59 - the very last second +# of the day on which it is due. This probably meets our patrons' expectations a bit +# better. + +use DBI; +use Getopt::Long; +use OpenSRF::EX qw/:try/; +use OpenSRF::Utils qw/:daemon/; +use OpenSRF::System; +use OpenSRF::AppSession; +use OpenSRF::Utils::SettingsClient; + +my ($config, $set_due_time) = ('/openils/conf/opensrf_core.xml', 0); + +GetOptions( + "bootstrap=s" => \$config, + "set_due_time" => \$set_due_time, +); + +OpenSRF::System->bootstrap_client( config_file => $config ); + +my $sc = OpenSRF::Utils::SettingsClient->new; +my $db_driver = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => 'driver' ); +my $db_host = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'host' ); +my $db_port = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'port' ); +my $db_name = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'db' ); +my $db_user = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'user' ); +my $db_pw = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'pw' ); + +my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + +my $dbh = DBI->connect($dsn,$db_user,$db_pw, {pg_enable_utf8 => 1, RaiseError => 1}); + +end_of_day($set_due_time); + +$dbh->disconnect; + +sub end_of_day { + my $set_due_time = shift; + + my $select_stmt = <selectcol_arrayref($select_stmt); + print localtime() . " - found " . scalar(@$results) . 
" circulation transactions to update:\n"; + foreach (@$results) { + print "\t$_\n"; + } + if ($set_due_time) { + my $stmt = $dbh->prepare($update_stmt); + my $updates = $stmt->execute(); + print "Updated $updates circulation transactions.\n"; + } +} diff --git a/tools/daily-scripts/reingest_uningested.pl b/tools/daily-scripts/reingest_uningested.pl new file mode 100644 index 0000000000..9aa16a4ebf --- /dev/null +++ b/tools/daily-scripts/reingest_uningested.pl @@ -0,0 +1,72 @@ +#!/usr/bin/perl -w +use strict; +use warnings; + +# Reingest biblio.record_entry records that didn't get ingested due to the simple_rec_sync bug +# Ingested records are expected to have an entry in the keyword index +# Might want to build a variation on this that reingests edited records on a nightly basis + +use DBI; +use Getopt::Long; +use OpenSRF::EX qw/:try/; +use OpenSRF::Utils qw/:daemon/; +use OpenSRF::System; +use OpenSRF::AppSession; +use OpenSRF::Utils::SettingsClient; + +my ($config, $reingest) = ('/openils/conf/opensrf_core.xml', 0); + +GetOptions( + "bootstrap=s" => \$config, + "reingest" => \$reingest, +); + +OpenSRF::System->bootstrap_client( config_file => $config ); + +my $sc = OpenSRF::Utils::SettingsClient->new; +my $db_driver = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => 'driver' ); +my $db_host = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'host' ); +my $db_port = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'port' ); +my $db_name = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'db' ); +my $db_user = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'user' ); +my $db_pw = $sc->config_value( apps => 'open-ils.storage' => app_settings => databases => database => 'pw' ); + +my $dsn = "dbi:" . $db_driver . ":dbname=" . $db_name .';host=' . $db_host . ';port=' . $db_port; + +my $dbh = DBI->connect($dsn,$db_user,$db_pw, {pg_enable_utf8 => 1, RaiseError => 1}); + +reingest_empty_records($reingest); + +$dbh->disconnect; + +sub reingest_empty_records { + my $select_stmt = < 0 + EXCEPT + SELECT mrd.source + FROM metabib.keyword_field_entry mrd +STMT + + my $results = $dbh->selectcol_arrayref($select_stmt); + print localtime() . " - found " . scalar(@$results) . 
" records to reingest\n"; + foreach (@$results) { + print "\t$_\n"; + } + if ($reingest) { + + foreach (@$results) { + my $r = OpenSRF::AppSession + ->create( 'open-ils.ingest' ) + ->request( 'open-ils.ingest.full.biblio.record' => $_ ); + + while (!$r->complete) { $r->recv }; + + # Sleep for 10 seconds between each request to prevent blocking + sleep(10); + } + } +} + diff --git a/tools/daily-scripts/test_eg_services.pl b/tools/daily-scripts/test_eg_services.pl new file mode 100644 index 0000000000..b2cc538e9e --- /dev/null +++ b/tools/daily-scripts/test_eg_services.pl @@ -0,0 +1,48 @@ +#/usr/bin/perl +use strict; +use OpenSRF::AppSession; +use OpenSRF::System; + +OpenSRF::System->bootstrap_client(config_file => '/openils/conf/opensrf_core.xml'); + +my @services = qw{ + opensrf.settings + opensrf.math + opensrf.dbmath + open-ils.acq + open-ils.cat + open-ils.supercat + open-ils.search + open-ils.circ + open-ils.actor + open-ils.auth + open-ils.storage + open-ils.penalty + open-ils.cstore + open-ils.collections + open-ils.ingest + open-ils.reporter + open-ils.reporter-store + open-ils.permacrud + open-ils.pcrud + open-ils.trigger + open-ils.fielder + open-ils.vandelay + open-ils.resolver +}; + +foreach my $service (@services) { + check_service($service); +} + +sub check_service { + my $service = shift; + my $session = OpenSRF::AppSession->create($service); + my $request = $session->request("opensrf.system.echo", "All is well"); + if ($request && $request->gather() eq "All is well") { + print "$service: All is well\n"; + } else { + print "$service: FAIL\n"; + } +} + diff --git a/tools/daily-scripts/update_indexes_for_ampersand_equivalency.sql b/tools/daily-scripts/update_indexes_for_ampersand_equivalency.sql new file mode 100644 index 0000000000..e38eab44b0 --- /dev/null +++ b/tools/daily-scripts/update_indexes_for_ampersand_equivalency.sql @@ -0,0 +1,16 @@ +-- Cheap way of making titles and the like that contain "&" +-- return results for well-meaning queries that use "and" +BEGIN; +UPDATE metabib.author_field_entry + SET value = value || ' and' + WHERE value LIKE '%&%' AND value NOT LIKE '% and %'; +UPDATE metabib.keyword_field_entry + SET value = value || ' and' + WHERE value LIKE '%&%' AND value NOT LIKE '% and %'; +UPDATE metabib.subject_field_entry + SET value = value || ' and' + WHERE value LIKE '%&%' AND value NOT LIKE '% and %'; +UPDATE metabib.title_field_entry + SET value = value || ' and' + WHERE value LIKE '%&%' AND value NOT LIKE '% and %'; +COMMIT; diff --git a/tools/ebooks/prep_ebook_records.py b/tools/ebooks/prep_ebook_records.py new file mode 100644 index 0000000000..ff72922465 --- /dev/null +++ b/tools/ebooks/prep_ebook_records.py @@ -0,0 +1,482 @@ +#!/usr/bin/env python +""" +Prepare sets of electronic resource MARC records for loading into Evergreen + +To avoid duplicating MARC records in Conifer, to minimize manual labour, +and to make records as consistent as possible, we want to automate the +processing of electronic resource MARC records purchased by two or more +Conifer institutions. + +Each institution must confirm the standard data they require to be added +to e-book MARC records. The principle here is to identify standard +requirements that would be the same for each record and therefore can +be accommodated in batch load. 
+""" + +import os, os.path, sys, getopt, pymarc, pymarc.marc8, re, urllib2 +from BeautifulSoup import BeautifulSoup + +class Institution(): + """Defines standard settings for each Conifer institution""" + + def __init__(self): + """Initialize the Institution object""" + self.algoma = { \ + "code": "OSTMA", \ + "ebrary_code": "algomauca", \ + "proxy": "http://libproxy.auc.ca/login?url=", \ + "link_text": "Available online" \ + } + + self.laurentian = { \ + "code": "OSUL", \ + "ebrary_code": "jndlu", \ + "proxy": "https://librweb.laurentian.ca/login?url=", \ + "link_text": "Available online / disponible en ligne" \ + } + + self.windsor = { \ + "code": "OWA", \ + "ebrary_code": "oculwindsor", \ + "proxy": "http://ezproxy.uwindsor.ca/login?url=", \ + "link_text": "To view Windsor's electronic resource click here." \ + } + + def get_settings(self, lib): + """Return the settings for a library by name""" + return getattr(self, lib) + + +def do_help(): + ''' + Print help for the Conifer ebook MARC processor + ''' + + print ''' +Conifer ebook MARC processor + +This script takes a set of MARC records and processes them to generate a set +of MARC records ready for loading into the Conifer consortial library +system. The processing consists of taking the existing 856 field and creating +one or more new 856 fields for each Conifer institution that should have access +to these resources. + +The script customizes the following aspects of each record: + + * Adds one 856 per institution specified at the command line: + * $u (URL) - prepends the institutional proxy and, for eBrary records, + changes the insitutional code + * $y (link text) - sets preferred text of the link to the resource + * $z (public note) - sets public note for the resource + + * Adds a 710 field to identify the publisher using the value specified + at the command line + * Adds a 590 internal note field using the value specified at the command + line. + +Required arguments: + -i / --input : The name of the input MARC file. + + -o / --output : The name of the output MARC file. + + -p / --publisher : The name of the publisher to be inserted in a 710 field. + + -A / --algoma: Add an 856 for Algoma University + + -L / --laurentian: Add an 856 for Laurentian University + + -W / --windsor : Add an 856 for University of Windsor + +Optional arguments: + -n / --note : The text of the internal note to be inserted into a 590 field. + + -s / --sample : The name of the sample output MARC file (generates + 1 sample record for every 100 records processed) + + -h / --help : Prints help message + +Examples: + %s --algoma --windsor -i crkn.mrc -o /tmp/crkn_out.mrc -p "eBrary Inc." + ''' % sys.argv[0] + sys.exit(0) + +def consolidate_options(opts): + """Make long arguments the standard form in command line options""" + + _options = dict(opts) + + for key, val in opts: + if key == '-i': + _options['--input'] = val + elif key == '-o': + _options['--output'] = val + elif key == '-p': + _options['--publisher'] = val + elif key == '-n': + _options['--note'] = val + elif key == '-A': + _options['--algoma'] = val + elif key == '-L': + _options['--laurentian'] = val + elif key == '-W': + _options['--windsor'] = val + elif key == '-s': + _options['--sample'] = val + elif key == '-h': + _options['--help'] = val + + return _options + +def check_options(options): + """Check the validity of options that were passed in""" + + _help = False + + if '--help' in options: + do_help() + + if '--input' not in options: + print "* Missing -i / --input argument!" 
+ _help = True + + if '--output' not in options: + print "* Missing -o / --output argument!" + _help = True + + if '--publisher' not in options: + print "* Missing -p / --publisher argument!" + _help = True + + _libraries = check_libraries(options) + if len(_libraries.keys()) == 0: + _help = True + + if _help == True: + do_help() + + # Get the input and output files + _input = options['--input'] + _output = options['--output'] + + try: + os.stat(_input) + except OSError: + print("* Cannot read input file %s" % (_input)) + sys.exit(0) + + try: + os.access(os.path.dirname(_output), os.W_OK) + except OSError: + print("* Cannot write to output path %s" % (os.path.dirname(_output))) + sys.exit(0) + + clean_opts = dict() + clean_opts['publisher'] = options['--publisher'] + + if '--sample' in options: + clean_opts['sample'] = options['--sample'] + + if '--note' in options: + clean_opts['note'] = options['--note'] + + clean_opts['libraries'] = _libraries + clean_opts['input'] = _input + clean_opts['output'] = _output + clean_opts['settings'] = Institution() + + return clean_opts + +def check_libraries(options): + """Build a dict of the libraries that were requested for this batch""" + + _libraries = dict() + if '--algoma' in options: + _libraries['algoma'] = True + + if '--laurentian' in options: + _libraries['laurentian'] = True + + if '--windsor' in options: + _libraries['windsor'] = True + + return _libraries + + +def parse_opts(): + """Get command-line arguments from the script""" + try: + _short_opts = 'i:o:p:ALWn:s:h' + _long_opts = ['input=', 'output=', 'publisher=', 'algoma', \ + 'laurentian', 'windsor', 'note=', 'sample=', 'help'] + opts = getopt.getopt(sys.argv[1:], _short_opts, _long_opts) + except getopt.GetoptError, ex: + print "* %s" % str(ex) + do_help() + + _options = consolidate_options(opts[0]) + return check_options(_options) + +def process_records(options): + """Converts raw ebook MARC records to Conifer-ready MARC records""" + + sample = '' + reader = pymarc.MARCReader( + open(options['input'], mode='rb'), to_unicode=True + ) + writer = pymarc.MARCWriter(open(options['output'], mode='wb')) + if ('sample' in options): + sample = pymarc.MARCWriter(open(options['sample'], mode='wb')) + + cnt = 0 + for record in reader: + cnt = cnt + 1 + try: + if not (record['856'] and record['856']['u']): + print("* No 856 for record # %s in file %s" + % (cnt, options['input']) + ) + + new_record = process_fields(record, options) + + writer.write(new_record) + if (sample and ((cnt == 1) or (cnt % 100 == 0))): + sample.write(new_record) + except Exception, ex: + print("* Error processing record %s - %s" % (cnt, ex)) + +def process_fields(record, options): + """Decide which fields to add, delete, and keep""" + + new_record = pymarc.Record(to_unicode=True, force_utf8=True) + + for field in record.get_fields(): + # Process all of the 856 fields + if field.tag == '856': + new_fields = process_urls(field, options) + if new_fields: + for new_856 in new_fields: + new_record.add_field(new_856) + # Strip out 9xx fields: we don't want local fields in our records + elif field.tag[0] == '9': + pass + # Strip out 300 fields that only contain placeholders + elif field.tag == '300' and field['a'] == 'p. 
cm.': + pass + else: + new_record.add_field(field) + + add_publisher(record, new_record, options) + + if 'note' in options: + note = pymarc.Field(tag = '590', + indicators = [' ', ' '], + subfields = [ + 'a', options['note'] + ] + ) + new_record.add_field(note) + + add_cat_source(new_record, options) + + return new_record + +def add_publisher(record, new_record, options): + """ + This is a convoluted way to avoid creating a new 710 if we already + have a matching 710 and just need to add the publisher relator code. + """ + + munge_publisher = False + need_publisher = True + need_relator = True + + # Iterate through all of the existing 710 fields + for sten in record.get_fields('710'): + for pub in sten.get_subfields('a'): + if pub == options['publisher']: + munge_publisher = True + for rel in sten.get_subfields('4'): + if rel == 'pbl': + need_publisher = False + need_relator = False + + if munge_publisher and need_relator: + sten.add_subfield('4', 'pbl') + need_publisher = False + + if need_publisher: + # Add the publisher, with relator code + seven_ten = pymarc.Field(tag = '710', + indicators = ['2', ' '], + subfields = [ + 'a', options['publisher'], + '4', 'pbl' + ] + ) + new_record.add_field(seven_ten) + + +def add_cat_source(record, options): + """Add or extend the 040 field to identify the cataloguing source""" + + # Only Windsor wants to do this + if 'windsor' not in options['libraries']: + return + + cat_source = record['040'] + if cat_source: + # Add subfield 'd' identifying Windsor + cat_source.add_subfield('d', 'CaOWA') + else: + # Add a 040 with subfield 'd' identifying Windsor + forty = pymarc.Field(tag = '040', + indicators = [' ', ' '], + subfields = [ 'd', 'CaOWA' ] + ) + record.add_field(forty) + + +def process_urls(field, options): + """Creates 856 fields required by Conifer""" + + new_fields = [] + + if not field['u']: + print "* No subfield 'u' found in this 856" + return None + + # If we have a ToC or author notes or whatever, replace with content + if field['u'].find('.loc.gov') > -1: + enrich = substitute_content(field) + if enrich and isinstance(enrich, pymarc.field.Field): + new_fields.append(enrich) + else: + for lib in options['libraries']: + data = options['settings'].get_settings(lib) + subs = get_subfields(field, data) + eight_five_six = pymarc.Field(tag = '856', + indicators = ['4', '0'], + subfields = subs + ) + new_fields.append(eight_five_six) + + return new_fields + +def substitute_content(field): + """Parses a ToC or author notes URL and generates a field""" + + url = field['u'] + + content_field = None + raw_content = '' + + # Skip machine-generated tables of contents + if url.find('/toc/') > -1: + return None + + # Get the data from the supplied URL + try: + req = urllib2.urlopen(url) + raw_content = BeautifulSoup(req.read()) + except urllib2.HTTPError, ex: + print("%s for URL %s" % (ex, url)) + return None + except urllib2.URLError, ex: + print("%s for URL %s" % (ex, url)) + return None + + content = process_loc_data(raw_content) + if not content: + return None + + if url.endswith('-b.html'): + # Biographical note + content_field = pymarc.Field( + tag = '545', + indicators = ['1', ' '], + subfields = ['a', content] + ) + elif url.endswith('-d.html'): + # Summary written by publisher + content_field = pymarc.Field( + tag = '520', + indicators = ['3', ' '], + subfields = ['a', content] + ) + + elif url.endswith('-t.html'): + # Table of contents + content_field = pymarc.Field( + tag = '505', + indicators = [' ', ' '], + subfields = ['a', content] + ) + 
else: + print("URL %s didn't match known LoC type" % (url)) + + return content_field + +def process_loc_data(raw_content): + """Given the LoC enriched data, make it usable""" + + # Short-circuit if we have an OCRed ToC; the quality is terrible + if raw_content.find(text='Electronic data is machine generated'): + return None + elif raw_content.find('
'):
+        return None
+
+    # Get all of the text after the horizontal rule
+    content = ' '.join(
+        raw_content.find('hr').findAllNext(text=True)
+    ).encode('utf8')
+
+    # Remove linefeeds
+    content = content.replace('\n', ' ')
+    content = content.replace('\r', ' ')
+
+    # Remove inline subject headings to avoid too much indexing boost
+    lcsh = content.find('Library of Congress subject headings')
+    if lcsh > -1:
+        content = content[0:lcsh]
+
+    # Farewell, starting and ending whitespace
+    content = content.strip().decode('utf8')
+
+    return content
+
+def get_subfields(field, data):
+    """Creates 856 subfields required by Conifer"""
+
+    subs = []
+    url = field['u']
+
+    # Is this an ebrary URL?
+    ebrary = False
+    if url.find('.ebrary.com') > -1:
+        ebrary = True
+        
+    # ebrary URLs look like: http://site.ebrary.com/lib/[site code]/Doc?id=2001019
+    # we need to replace the [site code] portion with the library-specific channel
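+    # e.g. for Windsor the rewritten URL would be http://site.ebrary.com/lib/oculwindsor/Doc?id=2001019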
+    if ebrary:
+        ebrary_url = re.search(r'^(.+?/lib/).+?(/.+?)$', url) 
+        url = ebrary_url.group(1) + data['ebrary_code'] + ebrary_url.group(2)
+        subs.extend(['u', url])
+    else:
+        subs.extend(['u', data['proxy'] + field['u']])
+
+    # Check for a $z as the first 856; in Springer records, at least, this
+    # indicates a multi-volume set that requires keeping the $z around
+    if field.subfields[0] == 'z':
+        subs.extend([field.subfields[0], field.subfields[1]])
+
+    subs.extend([
+            'y', data['link_text'],
+            '9', data['code']
+    ])
+
+    return subs
+
+
+if __name__ == '__main__':
+
+    process_records(parse_opts())
diff --git a/tools/migration-scripts/circ_rules.sql b/tools/migration-scripts/circ_rules.sql
new file mode 100644
index 0000000000..9e3b9c8431
--- /dev/null
+++ b/tools/migration-scripts/circ_rules.sql
@@ -0,0 +1,29 @@
+INSERT INTO config.rule_circ_duration (name, extended, normal, shrt, max_renewals) VALUES
+	('120_days_2_renew', '120 days', '120 days', '120 days', 2), -- OSUL rules
+	('3_weeks_2_renew', '3 weeks', '3 weeks', '3 weeks', 2),
+	('14_days_3_renew', '14 days', '14 days', '14 days', 3), -- OWA rules
+	('28_days_3_renew', '28 days', '28 days', '28 days', 3),
+	('7_days_1_renew', '7 days', '7 days', '7 days', 1),
+	('7_days_3_renew', '7 days', '7 days', '7 days', 3),
+	('120_minutes_0_renew', '120 minutes', '120 minutes', '120 minutes', 0),
+
+	('28_days_5_renew', '28 days', '28 days', '28 days', 5), -- OWAL rules
+	('14_days_5_renew', '14 days', '14 days', '14 days', 5), 
+	('2_days_20_renew', '2 days', '2 days', '2 days', 20), 
+	('3_days_20_renew', '3 days', '3 days', '3 days', 20), 
+	('1_day_1_renew', '1 day', '1 day', '1 day', 1), 
+	('16_hours_20_renew', '16 hours', '16 hours', '16 hours', 20), 
+	('3_hours_20_renew', '3 hours', '3 hours', '3 hours', 20), 
+	('5_hours_20_renew', '5 hours', '5 hours', '5 hours', 20),
+	('non_circ', '0 days', '0 days', '0 days', 0); -- obviously a sign that permit_patron or permit_copy should be invoked instead, but oh well for now
+
+INSERT INTO config.rule_max_fine (name, amount) VALUES
+	('10_dollars', 10.00),
+	('50_dollars', 50.00);
+
+INSERT INTO config.rule_recuring_fine (name, high, normal, low, recurance_interval) VALUES
+	('free', 0.00, 0.00, 0.00, '1 day'),
+	('5_cents_per_minute', 00.05, 00.05, 00.05, '1 minute'),
+	('60_cents_per_day', 00.60, 00.60, 00.60, '1 day'),
+	('2_dollars_per_day', 2.00, 2.00, 2.00, '1 day'),
+	('10_dollars_per_day', 10.00, 10.00, 10.00, '1 day');
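+
+-- Optional sanity check after loading (illustrative only):
+-- SELECT name, normal, max_renewals FROM config.rule_circ_duration ORDER BY name;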
diff --git a/tools/migration-scripts/create_test_users.pl b/tools/migration-scripts/create_test_users.pl
new file mode 100644
index 0000000000..81fce98afe
--- /dev/null
+++ b/tools/migration-scripts/create_test_users.pl
@@ -0,0 +1,68 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+=head1 Generate a set of staff and users for testing Evergreen
+
+=over
+
+=item get_org_unit()
+
+Map user IDs to org_units for permissions
+
+=back
+
+=cut
+
+sub get_org_unit {
+	my $id = shift;
+	if ($id <= 10) {
+		return 105; # OSUL
+	} elsif ($id <=20) {
+		return 106; # WINDSYS
+	} elsif ($id <=30) {
+		return 111; # ALGOMASYS
+	} elsif ($id <=40) {
+		return 125; # NOHIN
+	}
+}
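+# For example, get_org_unit(15) returns 106 (WINDSYS) and get_org_unit(35) returns 125 (NOHIN).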
+
+my $password = 'demo123';
+
+my @profiles = ( 1, 4, 5, 10 );
+
+my $profile_info = {
+    '1' => 'user',
+    '4' => 'cat',
+    '5' => 'circ',
+    '10' => 'admin',
+};
+
+my $sql = "BEGIN;\n";
+
+foreach my $i (1..40) {
+    foreach my $profile (@profiles) {
+        my $twodigit = sprintf('%02d', $i);
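+        # e.g. profile 4 (cat) with i == 7 yields barcode '1407' and usrname 'cat7'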
+        my $barcode = '1' . ($profile == 10 ? '0' : $profile) . $twodigit;
+        my $usrname = $profile_info->{$profile . ""} . $i;
+        my $family_name = ucfirst($usrname);
+	my $org_unit = get_org_unit($i);
+        $sql .= < 'utf8', RecordFormat => 'USMARC' );
+use MARC::Batch;
+
+# Clean up URIs from MARCXML records prior to batch ingest
+#   * If we detect a proxy URL:
+#     * Ensure ind1 is 1 or 4
+#     * Ensure ind2 is 0 or 1
+#     * Ensure $9 = aou.shortname
+#   * Trim whitespace and other tweaks while we're at it?
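+#
+# For instance (hypothetical record), an 856 whose $u starts with
+# http://librweb.laurentian.ca/login?url=... should leave this script with
+# ind1 = 4, trimmed whitespace in $u, and a $9 of OSUL appended.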
+
+my ($input_file, $output_file);
+GetOptions(
+    'input=s' => \$input_file,
+    'output=s' => \$output_file
+);
+
+if (!$input_file or !$output_file) {
+    print("Please specify the following options:\n");
+    print("\t--input : input file of MARCXML records\n");
+    print("\t--output : output file of processed MARCXML records\n");
+    exit();
+}
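+
+# Typical invocation, for reference:
+#   perl fixURIs.pl --input records.xml --output records_fixed.xml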
+
+my $input = new MARC::Batch( 'XML', $input_file );
+$input->strict_off();
+my $output = MARC::File::XML->out( $output_file );
+
+my ($touched, $url_cnt, $ind1_cnt, $ind2_cnt, $sub9_cnt) = (0, 0, 0, 0, 0);
+my $marc;
+while ( try { $marc = $input->next() } otherwise { $marc = -1 } ) {
+    # Skip the record if we couldn't even decode it
+    next if ($marc == -1);
+
+    my $edited = 0;
+    my @uri_fields = $marc->field('856');
+    foreach my $uri (@uri_fields) {
+        my ($orgunit);
+
+        # There's no way we should have multiples, but let's iterate anyway
+        my @urls = $uri->subfield('u');
+
+        foreach my $url (@urls) {
+            # For general use we should factor these out to a hash. Oh well.
+
+            # We're filtering by proxy address, because theoretically anything
+            # that is not proxied is open to the world to access and doesn't
+            # need to be treated as a URI particular to that org_unit
+            if ($url =~ m/librweb.laurentian.ca/o) {
+                $orgunit = 'OSUL';
+            } elsif ($url =~ m/libproxy.auc.ca/o) {
+                $orgunit = 'OSTMA';
+            } elsif ($url =~ m/normedproxy.lakeheadu.ca/o) {
+                $orgunit = 'OSM';
+            } elsif ($url =~ m/ezproxy.uwindsor.ca/o or $url =~ m/webvoy.uwindsor.ca/o ) {
+                $orgunit = 'OWA';
+            }
+
+            if ($orgunit) {
+                my $clean_url = $url;
+                $clean_url =~ s/^\s*(.*?)\s*$/$1/o;
+                if ($url ne $clean_url) {
+                    $uri->update(u => $clean_url);
+                    $edited++;
+                    $url_cnt++;
+                }
+
+                my $ind1 = $uri->indicator(1);
+                if ($ind1 and $ind1 ne '1' and $ind1 ne '4') {
+                    $uri->update(ind1 => '4');
+                    $edited++;
+                    $ind1_cnt++;
+                }
+
+                my $ind2 = $uri->indicator(2);
+                if ($ind2 and $ind2 ne '0' and $ind2 ne '1') {
+                    $uri->update(ind2 => '1');
+                    $edited++;
+                    $ind2_cnt++;
+                }
+
+                # Risking that we only have one subfield 9 here;
+                # that should be a slight risk, as $9 isn't defined in the spec
+                my $aou = $uri->subfield('9');
+                if (!$aou or $aou ne $orgunit) {
+                    $uri->update(9 => $orgunit);
+                    $edited++;
+                    $sub9_cnt++;
+                }
+            }
+        }
+    }
+    if ($edited) {
+        $touched++;
+    }
+    $output->write($marc);
+}
+$output->close();
+print "Touched $touched records to fix URIs.\n";
+print "\t$url_cnt URLs were touched\n";
+print "\t$ind1_cnt indicator 1 values were touched\n";
+print "\t$ind2_cnt indicator 2 values were touched\n";
+print "\t$sub9_cnt subfield '9' values were touched\n";
+
+# vim: et:ts=4:sw=4:
diff --git a/tools/migration-scripts/fix_bad_marcxml.pl b/tools/migration-scripts/fix_bad_marcxml.pl
new file mode 100644
index 0000000000..2e45dbd3c5
--- /dev/null
+++ b/tools/migration-scripts/fix_bad_marcxml.pl
@@ -0,0 +1,65 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+foreach my $file (@ARGV) {
+	process_file($file);
+}
+
+sub process_file {
+	my $file = shift;
+
+	# Empty datafields anger MARC::File::XML
+	open(FH, '<', $file) or die $!;
+	open(CLEAN, '>', "$file.new");
+
+	my ($trim, $lastline, $lineno) = (0, '', 1);
+	while (<FH>) {
+		if ($_ =~ m## and $lastline =~ m#
+		#    In subtitle "sports" appears as "
+		#    ort
+		#    .
+		#  
+		#
+		# This will at least enable MARC::File::XML to process it:
+		if ($_ =~ m##o or $_ =~ m##o ) {
+			print STDERR "Bad subfield code \" at line $lineno of file $file\n";
+			$_ =~ s{}{}o;
+			$_ =~ s{}{}o;
+		} elsif ($_ =~ m##o or $_ =~ m##o) {
+			print STDERR "Bad subfield code < at line $lineno of file $file\n";
+			$_ =~ s{}{}o;
+			$_ =~ s{}{}o;
+		} elsif ($_ =~ m##o or $_ =~ m##o) {
+			print STDERR "Bad subfield code & at line $lineno of file $file\n";
+			$_ =~ s{}{}o;
+			$_ =~ s{}{}o;
+		} elsif ($_ =~ m# 'utf-8' );
+use MARC::Charset;
+use DBI;
+
+my ($marcfile, $marctype, $enc, $config, $username, $password) = ('/openils/migration/windsor/bib20090430.mrc', 'USMARC', 'UTF8', '/openils/conf/opensrf_core.xml');
+
+GetOptions(
+	'encoding=s'	=> \$enc, # set assumed MARC encoding for MARC::Charset
+	'config=s'	=> \$config, # location of OpenSRF core config file, defaults to /openils/conf/opensrf_core.xml
+	"username=s"	=> \$username, # EG username
+	"password=s"	=> \$password, # EG password
+);
+
+if ($enc) {
+	MARC::Charset->ignore_errors(1);
+	MARC::Charset->assume_encoding($enc);
+}
+
+OpenSRF::System->bootstrap_client( config_file => $config );
+
+# Login to Evergreen and get an authentication token
+my $auth = oils_login($username, $password);
+if (!$auth) {
+	die "Could not retrieve an authentication token";
+}
+
+select STDERR; $| = 1;
+select STDOUT; $| = 1;
+binmode STDOUT, ":utf8";
+
+my $batch = new MARC::Batch ( $marctype, $marcfile );
+$batch->strict_off();
+$batch->warnings_off();
+
+my $starttime = time;
+my $rec;
+my $count = 0;
+my $rec_count = 0;
+PROCESS: while ( try { $rec = $batch->next } otherwise { $rec = -1 } ) {
+	next if ($rec == -1);
+
+	$count++;
+
+	if ($rec->as_formatted =~ m/[^\x00-\x7f]/) {
+		$rec_count++;
+		print "$rec_count of $count\n";
+		update_id_field(\$rec);
+		fix_URIs(\$rec);
+		update_marc(\$rec);
+
+		# Exit nice and early so that we don't wander off and update a whole batch without testing
+		if ($rec_count > 0) {
+			exit;
+		}
+	}
+	
+}
+
+# Set the 001 and 901 to our record ID in Conifer
+# Windsor records are offset by 1 million from their legacy ID
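+# (so a legacy 001 of 123456, for example, becomes 1123456 in Conifer)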
+sub update_id_field {
+	my $rec = shift;
+
+	my $tcn = $$rec->field('001');
+	my $rec_id = $tcn->data + 1000000;
+	$tcn->update($rec_id);
+	my $id_field = MARC::Field->new('901', '', '', 'a' => $rec_id, 'b' => 'Unknown', 'c' => $rec_id);
+	$$rec->append_fields($id_field);
+}
+
+sub fix_URIs {
+	my $marc = shift;
+
+	my @uri_fields = $$marc->field('856');
+	foreach my $uri (@uri_fields) {
+		my ($orgunit);
+
+		# There's no way we should have multiples, but let's iterate anyway
+		my @urls = $uri->subfield('u');
+
+		foreach my $url (@urls) {
+			# For general use we should factor these out to a hash. Oh well.
+
+			# We're filtering by proxy address, because theoretically anything
+			# that is not proxied is open to the world to access and doesn't
+			# need to be treated as a URI particular to that org_unit
+			if ($url =~ m/librweb.laurentian.ca/o) {
+				$orgunit = 'OSUL';
+			} elsif ($url =~ m/libproxy.auc.ca/o) {
+				$orgunit = 'OSTMA';
+			} elsif ($url =~ m/normedproxy.lakeheadu.ca/o) {
+				$orgunit = 'OSM';
+			} elsif ($url =~ m/ezproxy.uwindsor.ca/o or $url =~ m/webvoy.uwindsor.ca/o ) {
+				$orgunit = 'OWA';
+			}
+
+			if ($orgunit) {
+				my $clean_url = $url;
+				$clean_url =~ s/^\s*(.*?)\s*$/$1/o;
+				if ($url ne $clean_url) {
+					$uri->update(u => $clean_url);
+				}
+
+				my $ind1 = $uri->indicator(1);
+				if ($ind1 and $ind1 ne '1' and $ind1 ne '4') {
+					$uri->update(ind1 => '4');
+				}
+
+				my $ind2 = $uri->indicator(2);
+				if ($ind2 and $ind2 ne '0' and $ind2 ne '1') {
+					$uri->update(ind2 => '1');
+				}
+
+				# Risking that we only have one subfield 9 here;
+				# that should be a slight risk, as $9 isn't defined in the spec
+				my $aou = $uri->subfield('9');
+				if (!$aou or $aou ne $orgunit) {
+					$uri->update(9 => $orgunit);
+				}
+			}
+		}
+	}
+}
+
+sub update_marc {
+	my $rec = shift;
+
+	# Borrowed from marc2bre.pl to get clean XML
+	(my $xml = $$rec->as_xml_record()) =~ s/\n//sog;
+	$xml =~ s/^<\?xml.+\?\s*>//go;
+	$xml =~ s/>\s+</></go;
+	$xml = OpenILS::Application::AppUtils->entityize($xml);
+	$xml =~ s/[\x00-\x1f]//go;
+
+	# Update and ingest this puppy
+	my $update = OpenILS::Application::AppUtils->simplereq('open-ils.cat', 
+		'open-ils.cat.biblio.record.xml.update', 
+		($auth, int($$rec->field('001')->data), $xml)
+	);
+
+	# Return the cleaned-up XML in case we want to inspect it
+	return $xml;
+}
+
+
diff --git a/tools/migration-scripts/generate_copies.sql b/tools/migration-scripts/generate_copies.sql
new file mode 100644
index 0000000000..7dadd9ac15
--- /dev/null
+++ b/tools/migration-scripts/generate_copies.sql
@@ -0,0 +1,200 @@
+BEGIN;
+SET search_path TO scratchpad, public;
+
+-- Map libraries
+UPDATE staging_items
+	SET owning_lib = 'OSUL'
+	WHERE owning_lib = 'DESMARAIS';
+
+UPDATE staging_items
+	SET owning_lib = 'OSTMA'
+	WHERE owning_lib = 'ALGOMA';
+
+UPDATE staging_items
+	SET owning_lib = 'CRC'
+	WHERE location = 'ELSE-CURR';
+
+UPDATE staging_items
+	SET owning_lib = 'VALE'
+	WHERE location = 'INCO';
+
+UPDATE staging_items
+	SET owning_lib = 'LDRC'
+	WHERE location = 'EDUCATION';
+
+UPDATE staging_items
+	SET owning_lib = 'OSM', location = 'WWW'
+	WHERE location = 'HIRC-WWW';
+
+UPDATE staging_items
+	SET owning_lib = 'MRC'
+	WHERE location = 'DESM-MRC';
+
+UPDATE staging_items
+	SET owning_lib = 'NOSME'
+	WHERE location IN ('HIRCE-AV', 'HIRCE-BOOK', 'HIRCE-CIRC', 'HIRCE-PER', 'HIRCE-REF');
+
+UPDATE staging_items
+	SET owning_lib = 'NOSMW'
+	WHERE location IN ('HIRCW-AV', 'HIRCW-BOOK', 'HIRCW-CIRC', 'HIRCW-PER', 'HIRCW-REF');
+
+UPDATE staging_items
+	SET owning_lib = 'OSM'
+	WHERE owning_lib = 'HIRC';
+
+UPDATE staging_items
+	SET owning_lib = 'OWA'
+	WHERE owning_lib = 'Leddy';
+
+UPDATE staging_items
+	SET owning_lib = 'OWAL'
+	WHERE owning_lib = 'WINLAW';
+
+UPDATE staging_items
+	SET owning_lib = 'KAP'
+	WHERE location IN (SELECT location FROM staging_items WHERE location LIKE 'HRSTK-%');
+
+UPDATE staging_items
+	SET owning_lib = 'TIMMINS'
+	WHERE location IN (SELECT location FROM staging_items WHERE location LIKE 'HRSTT-%');
+
+UPDATE staging_items
+	SET owning_lib = 'SAH'
+	WHERE location = 'NEORCC';
+
+UPDATE staging_items
+	SET owning_lib = 'XSTRATA'
+	WHERE location = 'FALCON';
+
+-- Map item types
+UPDATE staging_items
+	SET item_type = 'BOOK'
+	WHERE item_type IN ('BOOKS');
+
+UPDATE staging_items
+	SET item_type = 'MICROFORM'
+	WHERE item_type = 'MICROFORMS';
+
+UPDATE staging_items
+	SET item_type = 'NEWSPAPER'
+	WHERE item_type = 'NEWSPAPERS';
+
+-- Map locations
+UPDATE staging_items
+	SET location = 'AV'
+	WHERE location IN ('HIRCE-AV', 'HIRCW-AV');
+
+UPDATE staging_items
+	SET location = 'BOOK'
+	WHERE location IN ('HIRCE-BOOK', 'HIRCW-BOOK');
+
+UPDATE staging_items
+	SET location = 'CIRC'
+	WHERE location IN ('HIRCE-CIRC', 'HIRCW-CIRC');
+
+UPDATE staging_items
+	SET location = 'PER'
+	WHERE location IN ('HIRCE-PER', 'HIRCW-PER');
+
+UPDATE staging_items
+	SET location = 'REF'
+	WHERE location IN ('HIRCE-REF', 'HIRCW-REF');
+
+UPDATE staging_items
+	SET location = 'DOC'
+	WHERE location IN ('HRST-DOC', 'HRSTK-DOC', 'HRSTT-DOC');
+
+UPDATE staging_items
+	SET location = 'EBOOK'
+	WHERE location IN ('HRST-EBOOK', 'HRSTK-EBOOK', 'HRSTT-EBOOK');
+
+UPDATE staging_items
+	SET location = 'PER'
+	WHERE location IN ('HRST-PER', 'HRSTK-PER', 'HRSTT-PER');
+
+UPDATE staging_items
+	SET location = 'PRET'
+	WHERE location IN ('HRST-PRET', 'HRSTK-PRET', 'HRSTT-PRET');
+
+UPDATE staging_items
+	SET location = 'REF'
+	WHERE location IN ('HRST-REF', 'HRSTK-REF', 'HRSTT-REF');
+
+UPDATE staging_items
+	SET location = 'VID'
+	WHERE location IN ('HRST-VID', 'HRSTK-VID', 'HRSTT-VID');
+
+-- First, we build shelving location
+INSERT INTO asset.copy_location (name, owning_lib)
+        SELECT  DISTINCT l.location, ou.id
+          FROM  staging_items l JOIN actor.org_unit ou
+                ON (l.owning_lib = ou.shortname)
+;
+
+-- Create circ modifiers for in-db circulation
+-- This is very, very crude but satisfies the FK constraints
+INSERT INTO config.circ_modifier (code, name, description, sip2_media_type, magnetic_media)
+        SELECT  DISTINCT item_type as code,
+          item_type AS name,
+          LOWER(item_type) AS description,
+          '001' AS sip2_media_type,
+          FALSE AS magnetic_media
+          FROM  staging_items
+          WHERE item_type NOT IN (SELECT code FROM config.circ_modifier);
+
+-- Import call numbers for bibrecord->library mappings
+INSERT INTO asset.call_number (creator,editor,record,label,owning_lib)
+        SELECT  DISTINCT 1, 1, l.bibkey , l.callnum, ou.id
+          FROM  staging_items l
+                JOIN biblio.record_entry b ON (l.bibkey = b.id)
+                JOIN actor.org_unit ou ON (l.owning_lib = ou.shortname);
+
+-- Import base copy data
+INSERT INTO asset.copy (
+        circ_lib, creator, editor, create_date, barcode,
+        status, location, loan_duration,
+        fine_level, circ_modifier, deposit, ref, call_number)
+        SELECT  DISTINCT  ou.id AS circ_lib,
+                1 AS creator,
+                1 AS editor,
+                l.createdate AS create_date,
+                l.barcode AS barcode,
+		CASE
+			WHEN l.location = 'BINDERY' THEN 2
+			WHEN l.location = 'CATALOGING' THEN 11
+			WHEN l.location = 'DISCARD' THEN 13
+			WHEN l.location = 'ILL' THEN 10
+			WHEN l.location = 'INPROCESS' THEN 5
+			WHEN l.location = 'LOST' THEN 3
+			WHEN l.location = 'LONGOVRDUE' THEN 4
+			WHEN l.location = 'MISSING' THEN 4
+			WHEN l.location = 'ON-ORDER' THEN 9
+			WHEN l.location = 'REPAIR' THEN 14
+			ELSE 0
+                END AS status,
+                cl.id AS location,
+                2 AS loan_duration,
+                2 AS fine_level,
+                CASE
+                        WHEN l.item_type IN ('REFERENCE', 'DEPOSIT_BK', 'BOOKS') THEN 'BOOK'
+                        WHEN l.item_type IN ('PERIODICALS') THEN 'PERIODICAL'
+                        ELSE l.item_type
+                END AS circ_modifier,
+                CASE
+                        WHEN l.item_type = 'DEPOSIT_BK' THEN TRUE
+                        ELSE FALSE
+                END AS deposit,
+                CASE
+                        WHEN l.item_type = 'REFERENCE' THEN TRUE
+                        ELSE FALSE
+                END AS ref,
+                cn.id AS call_number
+          FROM  staging_items l
+                JOIN actor.org_unit ou
+                        ON (l.owning_lib = ou.shortname)
+                JOIN asset.copy_location cl
+                        ON (ou.id = cl.owning_lib AND l.location = cl.name)
+                JOIN asset.call_number cn
+                        ON (ou.id = cn.owning_lib AND l.bibkey = cn.record AND l.callnum = cn.label)
+;
+COMMIT;
diff --git a/tools/migration-scripts/lul_import.pl b/tools/migration-scripts/lul_import.pl
new file mode 100644
index 0000000000..15857c7f9f
--- /dev/null
+++ b/tools/migration-scripts/lul_import.pl
@@ -0,0 +1,94 @@
+#!/usr/bin/perl
+use warnings;
+use strict;
+
+=head1 Automated processing of Laurentian system bibliographic records
+
+First we split the records into many smaller files of $chunk_size records
+each, then process a sample of the records: $denominator chunk files chosen
+at evenly spaced intervals across the set of files.
+
+=cut
+
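+# For example (illustrative numbers only): an 80,000-record input file split
+# into $chunk_size = 1000 record chunks yields 80 chunk files; with
+# $denominator = 20, every fourth chunk file is selected, i.e. 20 files or
+# roughly a quarter of the records.
+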
+my $chunk_size = 1000;
+my $chunk_dir = "chunks";
+my $chunk_prefix = 'chunk_';
+my $denominator = 20;
+
+my $marc = 'bibs.marc';
+my $xml_file = 'bibs.xml';
+my $bre_file = 'bibs.bre';
+my $ingest_file = 'bibs.ingest';
+my $loader_prefix = 'lul_load';
+my $tcn_dumpfile = 'tcn.dump';
+my $script_dir = '/home/lauadmin/Evergreen/Open-ILS/src/extras/import/';
+my $db_user = 'evergreen';
+my $db_pw = '';
+my $db_host = '';
+my $db_name = 'evergreen';
+
+my $input_files;
+
+convert_to_marcxml("true");
+process_bibs("--tcnfield 935 --tcnsubfield a --idfield 935 --idsubfield a --trash 901 --trash 949");
+
+=over
+
+=item convert_to_marcxml(is_marc8)
+
+=back
+
+=cut
+sub convert_to_marcxml {
+	my $is_marc8 = shift;
+	my $marc8_to_utf8 = '';
+
+=over
+
+Split the files up into chunks
+
+=back
+
+=cut
+	if (!-d $chunk_dir) {
+		mkdir($chunk_dir);
+	}
+
+	system("yaz-marcdump -C $chunk_size -s $chunk_dir/$chunk_prefix $marc > /dev/null");
+
+=over
+
+Convert selected sample of records to MARC21XML
+
+=back
+
+=cut
+	my @files = glob("$chunk_dir/$chunk_prefix\*");
+
+	foreach my $i (1..$denominator) {
+		my $filenumber = sprintf("%07i", (scalar(@files) / $denominator * $i) - 1);
+		$input_files .= "$chunk_dir/$chunk_prefix$filenumber ";
+	}
+	if ($is_marc8) { 
+		$marc8_to_utf8 = '-f MARC-8 -t UTF-8 -l 9=97';
+	}
+	system("yaz-marcdump -i marc -o marcxml $marc8_to_utf8 $input_files > $xml_file");
+}
+
+=over
+
+=item process_bibs(marc2bre_options)
+
+Starting with a set of MARC21XML records, these commands generate a set of
+SQL files suitable for loading into an Evergreen system.
+
+=back
+
+=cut
+sub process_bibs {
+	my $marc2bre_options = shift;
+
+	system("perl $script_dir/marc2bre.pl --marctype XML --tcn_dumpfile $tcn_dumpfile --db_user $db_user --db_host $db_host --db_pw $db_pw --db_name $db_name $marc2bre_options $xml_file > $bre_file 2> marc2bre.err");
+	system("perl $script_dir/direct_ingest.pl $bre_file > $ingest_file 2> ingest.err");
+	system("perl $script_dir/parallel_pg_loader.pl --output $loader_prefix -or bre -or mrd -or mfr -or mtfe -or mafe -or msfe -or mkfe -or msefe -a mrd -a mfr -a mtfe -a mafe -a msfe -a mkfe -a msefe < $ingest_file 2> loader.err");
+}
diff --git a/tools/migration-scripts/org_units.sql b/tools/migration-scripts/org_units.sql
new file mode 100644
index 0000000000..7bd3a21abf
--- /dev/null
+++ b/tools/migration-scripts/org_units.sql
@@ -0,0 +1,79 @@
+--
+-- PostgreSQL database dump
+--
+
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = off;
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+SET escape_string_warning = off;
+
+SET search_path = actor, pg_catalog;
+
+--
+-- Name: org_unit_type_id_seq; Type: SEQUENCE SET; Schema: actor; Owner: evergreen
+--
+
+SELECT pg_catalog.setval('org_unit_type_id_seq', 100, true);
+
+
+--
+-- Data for Name: org_unit; Type: TABLE DATA; Schema: actor; Owner: evergreen
+--
+
+COPY org_unit (id, parent_ou, ou_type, ill_address, holds_address, mailing_address, billing_address, shortname, name, email, phone, opac_visible) FROM stdin;
+115	102	3	\N	\N	\N	\N	HRSRH	HRSRH Health Sciences Library			t
+123	102	3	\N	\N	\N	\N	NEMHC	Northeast Mental Health Centre	\N	\N	t
+105	102	6	1	1	1	1	LUSYS	Laurentian University			t
+108	105	7	\N	\N	\N	\N	MEDIACEN	Instructional Media Centre			t
+103	105	7	1	1	1	1	OSUL	J.N. Desmarais Library			t
+107	105	7	\N	\N	\N	\N	SUDBURY	University of Sudbury			t
+104	105	7	1	1	1	1	HUNTINGTON	Huntington College Library			t
+117	105	7	\N	\N	\N	\N	MEDB	Mining and the Environment Database			t
+1	\N	1	1	1	1	1	CONIFER	Conifer			t
+102	1	4	1	1	1	1	LAURSYS	Laurentian System			t
+106	1	2	\N	\N	\N	\N	WINDSYS	Windsor System			t
+109	106	3	\N	\N	\N	\N	OWA	Leddy Library			t
+112	102	3	\N	\N	\N	\N	ARTGALL	Art Gallery of Sudbury			t
+113	102	3	\N	\N	\N	\N	CFOF	Centre Franco-Ontarien de Folklore			t
+116	102	3	\N	\N	\N	\N	SAH	Sault Area Hospital			t
+118	102	3	\N	\N	\N	\N	MNDM	Mines Library, Willet Green Miller Centre			t
+119	102	3	\N	\N	\N	\N	XSTRATA	Xstrata Process Support Centre Library			t
+120	102	3	\N	\N	\N	\N	VALE	Vale Inco			t
+122	106	3	\N	\N	\N	\N	OWAL	Paul Martin Law Library			t
+111	1	2	\N	\N	\N	\N	ALGOMASYS	Algoma System	\N	\N	t
+124	111	3	\N	\N	\N	\N	OSTMA	Algoma University, Wishart Library			t
+125	1	2	\N	\N	\N	\N	OSM	NOHIN	\N	\N	t
+126	125	3	\N	\N	\N	\N	NOSMW	Northern Ontario School of Medicine (West)	\N	\N	t
+110	125	3	\N	\N	\N	\N	NOSME	Northern Ontario School of Medicine (East)			t
+114	102	6	\N	\N	\N	\N	HEARSTSYS	Université de Hearst			t
+127	114	7	\N	\N	\N	\N	HEARST	Hearst, Bibliothèque Maurice-Saulnier	\N	\N	t
+128	114	7	\N	\N	\N	\N	KAP	Hearst Kapuskasing, Centre de Ressources	\N	\N	t
+129	114	7	\N	\N	\N	\N	TIMMINS	Hearst Timmins, Centre de Ressources	\N	\N	t
+130	105	7	\N	\N	\N	\N	CRC	Curriculum Resource Centre	\N	\N	t
+131	105	7	\N	\N	\N	\N	MRC	Music Resource Centre	\N	\N	t
+132	105	7	\N	\N	\N	\N	LDCR	Laboratoire de didactiques, E.S.E.	\N	\N	t
+\.
+
+
+
+--
+-- Data for Name: org_unit_type; Type: TABLE DATA; Schema: actor; Owner: evergreen
+--
+
+COPY org_unit_type (id, name, opac_label, depth, parent, can_have_vols, can_have_users) FROM stdin;
+1	Consortium	Everywhere	0	\N	f	f
+2	System	University Libraries	1	1	f	f
+4	LU System	University and Partners	1	1	f	f
+5	Bookmobile	Your Bookmobile	3	3	t	t
+7	University-Library	University Library	3	6	t	t
+3	Branch	This Branch	2	2	t	t
+6	University	Campus Libraries	2	2	t	t
+\.
+
+
+--
+-- PostgreSQL database dump complete
+--
+
+SELECT SETVAL('actor.org_unit_id_seq', (SELECT MAX(id) FROM actor.org_unit));
diff --git a/tools/migration-scripts/patron_groups.sql b/tools/migration-scripts/patron_groups.sql
new file mode 100644
index 0000000000..a8bf17bf55
--- /dev/null
+++ b/tools/migration-scripts/patron_groups.sql
@@ -0,0 +1,58 @@
+BEGIN;
+
+-- Set up patron groups and permissions
+
+INSERT INTO permission.grp_tree (name, parent, usergroup, perm_interval, description, application_perm) VALUES ('Faculty', 2, 't', '1 year', 'Faculty', 'group_application.user.patron');
+INSERT INTO permission.grp_tree (name, parent, usergroup, perm_interval, description, application_perm) VALUES ('Graduate', 2, 't', '1 year', 'Graduate', 'group_application.user.patron');
+INSERT INTO permission.grp_tree (name, parent, usergroup, perm_interval, description, application_perm) VALUES ('Undergraduate', 2, 't', '1 year', 'Undergraduate', 'group_application.user.patron');
+INSERT INTO permission.grp_tree (name, parent, usergroup, perm_interval, description, application_perm) VALUES ('Readers', 2, 't', '1 year', 'Readers', 'group_application.user.patron');
+INSERT INTO permission.grp_tree (name, parent, usergroup, perm_interval, description, application_perm) VALUES ('Staff members', 2, 't', '1 year', 'Staff members', 'group_application.user.patron');
+
+-- (11 = Faculty, 12 = Graduate, 13 = Undergraduate, 14 = Reader, 15 = Staff members)
+
+-- Not really necessary, unless you want to restrict the ability to add users to these specific groups
+INSERT INTO permission.perm_list (code, description) VALUES
+    ('group_application.user.patron.faculty', 'Allow a user to add/remove users to/from the "Faculty" group'),
+    ('group_application.user.patron.grad', 'Allow a user to add/remove users to/from the "Graduate students" group'),
+    ('group_application.user.patron.undergrad', 'Allow a user to add/remove users to/from the "Undergraduate students" group'),
+    ('group_application.user.patron.reader', 'Allow a user to add/remove users to/from the "Readers" group'),
+    ('group_application.user.patron.staff', 'Allow a user to add/remove users to/from the "Staff members" group')
+;
+
+-- Give circulators the ability to abort transits
+-- ABORT_TRANSIT = perm 111, ABORT_REMOTE_TRANSIT = perm 112
+INSERT INTO permission.grp_perm_map (grp, perm, depth)
+    VALUES (5, 111, 2), (5, 112, 2);
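+-- (A less id-dependent form, assuming those permission codes exist in
+-- permission.perm_list on the target system, would be:
+--   INSERT INTO permission.grp_perm_map (grp, depth, perm)
+--       SELECT 5, 2, id FROM permission.perm_list
+--       WHERE code IN ('ABORT_TRANSIT', 'ABORT_REMOTE_TRANSIT');
+-- )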
+
+-- Enable all staff to cancel holds
+INSERT INTO permission.grp_perm_map (grp, perm, depth)
+    VALUES (3, 114, 0);
+
+-- Enable cataloguers to import bib records
+-- Cataloguer profile group = 4
+-- Depth (how much of the org_tree does the user have this permission over) = 0 (the whole thing)
+INSERT INTO permission.grp_perm_map (grp, depth, perm)
+    SELECT 4, 0, id from permission.perm_list
+    WHERE code LIKE '%IMPORT%' AND id NOT IN (
+        SELECT perm FROM permission.grp_perm_map
+        WHERE grp = 4
+    )
+;
+
+-- Grant ability to merge bib records to cataloguers
+INSERT INTO permission.grp_perm_map (grp, perm, depth)
+    VALUES (4, 230, 0); 
+
+-- Grant ability to delete bib records to cataloguers
+INSERT INTO permission.grp_perm_map (grp, perm, depth)
+    VALUES (4, 153, 0); 
+
+-- Grant ability to add, delete, or update closing dates
+INSERT INTO permission.grp_perm_map (grp, perm, depth)
+    VALUES (10, 117, 1), (10, 116, 1), (10, 118, 1); 
+
+-- Restrict visibility of patrons to staff in related institutions
+UPDATE permission.grp_perm_map SET depth = 2
+    WHERE grp = 3 AND perm = 31; 
+
+COMMIT;
diff --git a/tools/migration-scripts/windsor_import.pl b/tools/migration-scripts/windsor_import.pl
new file mode 100644
index 0000000000..f6561718f0
--- /dev/null
+++ b/tools/migration-scripts/windsor_import.pl
@@ -0,0 +1,94 @@
+#!/usr/bin/perl
+use warnings;
+use strict;
+
+=head1 Automated processing of Windsor system bibliographic records
+
+First we split the records into many smaller files of $chunk_size records
+each, then process a sample of the records: $denominator chunk files chosen
+at evenly spaced intervals across the set of files.
+
+=cut
+
+my $chunk_size = 1000;
+my $chunk_dir = "chunks";
+my $chunk_prefix = 'chunk_';
+my $denominator = 20;
+
+my $marc = 'windsor_bibs.marc';
+my $xml_file = 'windsor_bibs.xml';
+my $bre_file = 'bibs.bre';
+my $ingest_file = 'bibs.ingest';
+my $loader_prefix = 'windsor_load';
+my $tcn_dumpfile = 'tcn.dump';
+my $script_dir = '/home/lauadmin/Evergreen-trunk/Open-ILS/src/extras/import/';
+my $db_user = 'evergreen';
+my $db_pw = '';
+my $db_host = '';
+my $db_name = '';
+
+my $input_files;
+
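+# Windsor records are passed through without MARC-8 conversion (0 = not
+# MARC-8); the 001 field supplies both the TCN and the record id.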
+convert_to_marcxml("false");
+process_bibs("--tcnfield 001 --idfield 001");
+
+=over
+
+=item convert_to_marcxml(is_marc8)
+
+=back
+
+=cut
+sub convert_to_marcxml {
+	my $is_marc8 = shift;
+	my $marc8_to_utf8 = '';
+
+=over
+
+Split the files up into chunks
+
+=back
+
+=cut
+	if (!-d $chunk_dir) {
+		mkdir($chunk_dir);
+	}
+
+	system("yaz-marcdump -C $chunk_size -s $chunk_dir/$chunk_prefix $marc > /dev/null");
+
+=over
+
+Convert selected sample of records to MARC21XML
+
+=back
+
+=cut
+	my @files = glob("$chunk_dir/$chunk_prefix\*");
+
+	foreach my $i (1..$denominator) {
+		my $filenumber = sprintf("%07i", (scalar(@files) / $denominator * $i) - 1);
+		$input_files .= "$chunk_dir/$chunk_prefix$filenumber ";
+	}
+	if ($is_marc8) { 
+		$marc8_to_utf8 = '-f MARC-8 -t UTF-8 -l 9=97';
+	}
+	system("yaz-marcdump -i marc -o marcxml $marc8_to_utf8 $input_files > $xml_file");
+}
+
+=over
+
+=item process_bibs(marc2bre_options)
+
+Starting with a set of MARC21XML records, these commands generate a set of
+SQL files suitable for loading into an Evergreen system.
+
+=back
+
+=cut
+sub process_bibs {
+	my $marc2bre_options = shift;
+
+	system("perl $script_dir/windsor_marc2bre.pl --marctype XML --tcn_dumpfile $tcn_dumpfile --db_user $db_user --db_host $db_host --db_pw $db_pw --db_name $db_name $marc2bre_options $xml_file > $bre_file 2> marc2bre.err");
+	system("perl $script_dir/direct_ingest.pl $bre_file > $ingest_file 2> ingest.err");
+	system("perl $script_dir/parallel_pg_loader.pl --output $loader_prefix -or bre -or mrd -or mfr -or mtfe -or mafe -or msfe -or mkfe -or msefe -a mrd -a mfr -a mtfe -a mafe -a msfe -a mkfe -a msefe < $ingest_file 2> loader.err");
+}
diff --git a/tools/migration-scripts/windsor_patron_load_base.csv b/tools/migration-scripts/windsor_patron_load_base.csv
new file mode 100644
index 0000000000..978a591c27
--- /dev/null
+++ b/tools/migration-scripts/windsor_patron_load_base.csv
@@ -0,0 +1 @@
+1	FAC	dbs@example.com	12345000012345	1	Other	12345000012345	\N	DAN	B	SCOTT	\N	555-5555 x.	\N	\N	Must have Card	Leddy	t	f	f	9-Jun-97	30-Apr-11
diff --git a/tools/migration-scripts/windsor_patrons.sql b/tools/migration-scripts/windsor_patrons.sql
new file mode 100644
index 0000000000..46ca2a662e
--- /dev/null
+++ b/tools/migration-scripts/windsor_patrons.sql
@@ -0,0 +1,260 @@
+DROP TABLE staging_patron;
+DROP TABLE staging_patron_address;
+DROP TABLE staging_barcode;
+DROP TABLE staging_note;
+DROP TABLE staging_mobile_phone;
+DROP TABLE staging_other_phone;
+
+BEGIN;
+
+CREATE TABLE staging_patron (idfield SERIAL NOT NULL, username TEXT, profile TEXT, identity_type TEXT, password TEXT, standing INTEGER, identity_type2 TEXT, identity_value TEXT, name_prefix TEXT, first_given_name TEXT, second_given_name TEXT, family_name TEXT, name_suffix TEXT, day_phone TEXT, evening_phone TEXT, other_phone TEXT, alert_message TEXT, home_library TEXT, active BOOLEAN, barred BOOLEAN, deleted BOOLEAN, create_date DATE, expire_date DATE);
+CREATE TABLE staging_patron_address (idfield SERIAL NOT NULL, barcode TEXT, address_type TEXT, street_1 TEXT, street_2 TEXT, city TEXT, county TEXT, province TEXT, country TEXT, postal_code TEXT);
+CREATE TABLE staging_barcode (idfield SERIAL NOT NULL, barcode TEXT, old_barcode TEXT, active BOOLEAN);
+CREATE TABLE staging_note (idfield SERIAL NOT NULL, barcode TEXT, create_date TEXT, publicly_visible BOOLEAN, title TEXT, note TEXT, create_date2 TEXT);
+CREATE TABLE staging_mobile_phone (idfield SERIAL NOT NULL, barcode TEXT, phone TEXT);
+CREATE TABLE staging_other_phone (idfield SERIAL NOT NULL, barcode TEXT, phone TEXT);
+
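+-- Note: these COPY statements are executed by the PostgreSQL server process, so
+-- they need superuser rights and paths readable by the server; from an
+-- unprivileged psql session, \copy with the same column lists is the usual
+-- alternative.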
+COPY staging_patron (username, profile, identity_type, password, standing, identity_type2, identity_value, name_prefix, first_given_name, second_given_name, family_name, name_suffix, day_phone, evening_phone, other_phone, alert_message, home_library, active, barred, deleted, create_date, expire_date) FROM '/home/dbs/conifer/windsor_patron_load_base.csv';
+COPY staging_patron_address (barcode, address_type, street_1, street_2, city, county, province, country, postal_code) FROM '/home/dbs/conifer/windsor_patron_load_addresses.csv';
+COPY staging_barcode (barcode, old_barcode, active) FROM '/home/dbs/conifer/windsor_patron_load_barcodes.csv';
+COPY staging_note (barcode, create_date, publicly_visible, title, note, create_date2) FROM '/home/dbs/conifer/windsor_patron_load_notes.csv';
+COPY staging_mobile_phone (barcode, phone) FROM '/home/dbs/conifer/windsor_patron_load_phones_mobile.csv';
+COPY staging_other_phone (barcode, phone) FROM '/home/dbs/conifer/windsor_patron_load_phones_other.csv';
+
+COMMIT;
+
+---- Let's find our duplicate usernames
+--SELECT trim(both from username), identity_value, COUNT(username) as ucount
+--FROM staging_patron
+--GROUP BY username, identity_value
+--HAVING COUNT(username) > 1
+--ORDER BY ucount DESC;
+--
+---- Now let's find our duplicate barcodes
+--SELECT trim(both from username), identity_value, COUNT(identity_value) as ucount
+--FROM staging_patron
+--GROUP BY username, identity_value 
+--HAVING COUNT(identity_value) > 1
+--ORDER BY ucount DESC;
+--
+---- Get the distinct set of values for dupe usernames
+---- including active/barred/deleted status, just in case
+--SELECT DISTINCT trim(both from username), identity_value, active, barred, deleted
+--FROM staging_patron
+--WHERE username IN (
+--    SELECT username 
+--    FROM staging_patron
+--    GROUP BY username
+--    HAVING count(username) > 1
+--);
+--
+---- Do the barcodes for dupe usernames exist over in the staging_barcode table?
+--SELECT DISTINCT TRIM(p.username) AS uname, p.identity_value, 
+--    CASE
+--        WHEN p.identity_value IN (SELECT barcode FROM staging_barcode WHERE barcode = p.identity_value AND active = 't') THEN 'active new'
+--        WHEN p.identity_value IN (SELECT barcode FROM staging_barcode WHERE old_barcode = p.identity_value AND active = 't') THEN 'active old'
+--        WHEN p.identity_value IN (SELECT barcode FROM staging_barcode WHERE barcode = p.identity_value AND active = 'f') THEN 'inactive new'
+--        WHEN p.identity_value IN (SELECT barcode FROM staging_barcode WHERE old_barcode = p.identity_value AND active = 'f') THEN 'inactive old'
+--        ELSE 'not found'
+--    END AS barcode_state
+--    FROM staging_patron p
+--    WHERE p.identity_value IN (
+--        SELECT DISTINCT identity_value
+--        FROM staging_patron
+--        WHERE username IN (
+--            SELECT username 
+--            FROM staging_patron
+--            GROUP BY username
+--            HAVING COUNT(username) > 1
+--        )
+--    )
+--    ORDER BY uname
+--;
+--
+-- Get rid of the username dupes in a savage manner; last one entered wins
+DELETE FROM staging_patron
+    WHERE idfield NOT IN (
+        SELECT MAX(dt.idfield)
+
+        FROM staging_patron dt
+        GROUP BY dt.username
+    )
+;
+
+DELETE FROM staging_patron
+    WHERE idfield NOT IN (
+        SELECT MAX(dt.idfield)
+
+        FROM staging_patron dt
+        GROUP BY dt.identity_value
+    )
+;
+
+-- And get rid of duplicate (old) barcodes
+DELETE FROM staging_barcode
+    WHERE idfield NOT IN (
+        SELECT MAX(dt.idfield)
+
+        FROM staging_barcode dt
+        GROUP BY dt.old_barcode
+    )
+;
+
+SELECT COUNT(*) FROM staging_patron;
+
+BEGIN;
+
+INSERT INTO actor.usr (profile, usrname, passwd, standing, ident_type, ident_value, first_given_name, second_given_name, family_name, day_phone, home_ou, active, barred, deleted, alert_message, create_date, expire_date)
+    SELECT DISTINCT
+        CASE
+            -- Faculty
+            WHEN trim(both from patron.profile) IN ('AFAC', 'FAC', 'LAW FACLTY') THEN 11
+            -- Graduate student
+            WHEN trim(both from patron.profile) IN ('AGRAD', 'GRAD') THEN 12
+            -- Undergraduate student
+            WHEN trim(both from patron.profile) IN ('AUND', 'UND') THEN 13
+            -- Readers (obviously need to map these to something better)
+            WHEN trim(both from patron.profile) = 'DIRB' THEN 14
+            WHEN trim(both from patron.profile) = 'EXAL' THEN 14
+            WHEN trim(both from patron.profile) = 'EXEC' THEN 14
+            WHEN trim(both from patron.profile) = 'EXOT' THEN 14
+            WHEN trim(both from patron.profile) = 'ILL' THEN 14
+            WHEN trim(both from patron.profile) = 'LAW1' THEN 14
+            WHEN trim(both from patron.profile) = 'LAW2' THEN 14
+            WHEN trim(both from patron.profile) = 'LAW3' THEN 14
+            WHEN trim(both from patron.profile) = 'LAW COUR' THEN 14
+            WHEN trim(both from patron.profile) = 'LAW DAY365' THEN 14
+            WHEN trim(both from patron.profile) = 'LAW KEY2' THEN 14
+            WHEN trim(both from patron.profile) = 'STAF' THEN 14
+            WHEN trim(both from patron.profile) IS NULL THEN 14
+            ELSE 14
+        END AS profile, 
+        trim(both from patron.username) AS usrname,
+        trim(both from patron.password) AS passwd,
+        CASE
+            WHEN patron.standing = 0 THEN 2 -- interpreted as "Barred"?
+            ELSE 1
+        END AS standing,
+        CASE
+            WHEN patron.identity_type = 'Other' THEN 3
+        END AS ident_type,
+        trim(both from patron.identity_value) AS ident_value,
+        trim(both from patron.first_given_name) AS first_given_name,
+        CASE
+            WHEN trim(both from patron.second_given_name) != '' THEN patron.second_given_name
+            ELSE NULL
+        END AS second_given_name,
+        trim(both from patron.family_name) AS family_name,
+        CASE
+            WHEN trim(both from patron.day_phone) != '' THEN patron.day_phone
+            ELSE NULL
+        END AS day_phone,
+        CASE
+            WHEN trim(both from patron.home_library) = 'Leddy' THEN 109
+            WHEN trim(both from patron.home_library) = 'Law' THEN 122
+            ELSE 109
+        END AS home_ou,
+        patron.active as active,
+        patron.barred as barred,
+        patron.deleted as deleted,
+        CASE
+            WHEN trim(both from patron.alert_message) != '' THEN patron.alert_message
+            ELSE NULL
+        END AS alert_message,
+        patron.create_date::DATE as create_date,
+        patron.expire_date::DATE as expire_date
+    FROM staging_patron patron
+;
+
+-- And add our mobile and other phones to the patron record
+-- Mapping Windsor's "other" to evening_phone in Evergreen
+UPDATE actor.usr au 
+    SET evening_phone = TRIM(sop.phone)
+    FROM staging_other_phone sop
+    WHERE sop.barcode = au.ident_value
+;
+
+-- Mapping Windsor's "mobile" to other_phone in Evergreen
+UPDATE actor.usr au 
+    SET other_phone = TRIM(smp.phone)
+    FROM staging_mobile_phone smp
+    WHERE smp.barcode = au.ident_value
+;
+
+-- Now we need to generate actor.card entries
+-- And be careful to distinguish Windsor vs. existing entries
+INSERT INTO actor.card (usr, barcode, active) 
+    SELECT au.id, au.ident_value, 't'
+    FROM actor.usr au
+    WHERE au.ident_value IS NOT NULL
+    AND au.home_ou IN (109, 122)
+    AND au.ident_value NOT IN (SELECT barcode FROM actor.card)
+    AND au.id > 1
+;
+
+UPDATE actor.usr au SET card = ac.id
+    FROM actor.card ac
+    WHERE ac.barcode = au.ident_value
+    AND au.card IS NULL
+    AND au.id > 1
+;
+
+-- Get rid of the "old" barcodes that we inserted into actor.usr
+DELETE FROM staging_barcode
+    WHERE old_barcode IN (SELECT barcode FROM actor.card);
+
+INSERT INTO actor.card (usr, barcode, active)
+    SELECT au.id, sb.old_barcode, sb.active
+    FROM staging_barcode sb
+    INNER JOIN actor.usr au
+    ON (sb.barcode = au.ident_value AND au.id > 1)
+    WHERE sb.old_barcode IS NOT NULL
+    AND sb.old_barcode NOT IN (SELECT barcode FROM actor.card)
+;
+
+UPDATE staging_patron_address
+    SET county = NULL
+    WHERE TRIM(county) = '';
+
+UPDATE staging_patron_address
+    SET address_type = NULL
+    WHERE TRIM(address_type) = ''
+;
+
+INSERT INTO actor.usr_address (usr, address_type, street1, street2, city, state, county, country, post_code) 
+    SELECT DISTINCT
+        ac.usr,
+        TRIM(sa.address_type),
+        TRIM(sa.street_1),
+        TRIM(sa.street_2),
+        TRIM(sa.city),
+        TRIM(sa.province),
+        TRIM(sa.county),
+        TRIM(sa.country),
+        TRIM(sa.postal_code)
+    FROM staging_patron_address sa
+    INNER JOIN actor.card ac ON (ac.barcode = sa.barcode)
+;
+
+-- The note dates in the staging data are DD-Mon-YY strings, so parse them day-first
+SET DateStyle TO 'DMY';
+
+INSERT INTO actor.usr_note (usr, creator, create_date, pub, title, value)
+    SELECT
+        ac.usr,
+        1, -- We don't have the real creator in the staging table, so make it admin
+        CASE
+            WHEN TRIM(create_date) != '' THEN sn.create_date::DATE
+            ELSE '01-May-00'::DATE
+        END AS create_date,
+        sn.publicly_visible,
+        TRIM(sn.title),
+        TRIM(sn.note)
+    FROM staging_note sn
+    INNER JOIN actor.card ac ON (ac.barcode = sn.barcode)
+;
+
+--COMMIT;
+--ROLLBACK;
+
+-- vim: et:ts=4:sw=4:
diff --git a/tools/patch_conifer.sh b/tools/patch_conifer.sh
new file mode 100644
index 0000000000..cec9f37771
--- /dev/null
+++ b/tools/patch_conifer.sh
@@ -0,0 +1,171 @@
+#!/bin/bash
+
+REMOTE_CANONICAL_SVN_DIR=svn://svn.open-ils.org/ILS/branches/rel_1_6_1
+CANONICAL_SVN_DIR=/home/opensrf/Evergreen-rel_1_6_1
+CANONICAL_EXPORT_DIR=/home/opensrf/EG_1_6_1-export
+REMOTE_SVN_DIR=svn://svn.open-ils.org/ILS-Contrib/conifer
+LOCAL_SVN_DIR=/home/opensrf/conifer-svn
+LOCAL_EXPORT_DIR=/home/opensrf/conifer-export
+INSTALL_DIR=/openils
+BRANCH=branches/rel_1_6_1
+
+update_repository()
+{
+	# Create a copy of the Evergreen source if it doesn't exist
+	if [[ -d "$CANONICAL_SVN_DIR" ]]
+	then echo -n
+	else
+		svn co -q "$REMOTE_CANONICAL_SVN_DIR" "$CANONICAL_SVN_DIR"
+	fi
+
+	# Create a copy of the Conifer customized source if it doesn't exist
+	if [[ -d "$LOCAL_SVN_DIR" ]]
+	then echo -n
+	else
+		svn co -q "$REMOTE_SVN_DIR" "$LOCAL_SVN_DIR"
+	fi
+
+	# Update our copy of the Evergreen source
+	svn up -q "$CANONICAL_SVN_DIR"
+
+	# Update our copy of the Conifer customized files
+	svn up -q "$LOCAL_SVN_DIR"
+
+	# Export a clean copy of the SVN files
+	# We have to delete the target directory first
+	rm -fr "$LOCAL_EXPORT_DIR"
+	svn export -q "$LOCAL_SVN_DIR" "$LOCAL_EXPORT_DIR"
+	rm -fr "$CANONICAL_EXPORT_DIR"
+	svn export -q "$CANONICAL_SVN_DIR" "$CANONICAL_EXPORT_DIR"
+}
+
+copy_web_common()
+{
+	# Copy the impossible-to-be-harmful (really!) Web stuff in place
+	# Images
+	cp -r "$LOCAL_EXPORT_DIR"/"$BRANCH"/web/opac/images/* "$INSTALL_DIR"/var/web/opac/images/.
+	# DTD files for replacement strings
+	cp -r "$LOCAL_EXPORT_DIR"/"$BRANCH"/web/opac/locale/* "$INSTALL_DIR"/var/web/opac/locale/.
+	# Themes
+	cp -r "$LOCAL_EXPORT_DIR"/"$BRANCH"/web/opac/theme/* "$INSTALL_DIR"/var/web/opac/theme/.
+}
+
+copy_opac_skins()
+{
+	SKINS=`cd "$LOCAL_EXPORT_DIR"/"$BRANCH"/web/opac/skin; ls -1`
+	# Get rid of "default", add "default_conifer" first
+	SKINS=`echo "default_conifer $SKINS" | sed -e 's/default //' -`
+	LINKS=`cd "$INSTALL_DIR"/var/web/opac/skin/default/; find xml -type l -print`
+
+	# For each skin:
+	# 1. Copy the default skin in place first
+	# 2. Copy our custom default files over top of default_conifer, creating default_conifer
+	# 3. Copy the default_conifer skin into place as "$skin"
+	# 4. Copy the custom skin files over top "$skin"
+
+	# Munge the links to point to default_conifer for rdetail.js
+	# Munge the links to point to "$skin" for all
+	for skin in $SKINS
+	do
+		rm -fr "$INSTALL_DIR"/var/web/opac/skin/"$skin"
+		if [ "$skin" == "default_conifer" ]
+		then
+			cp -r "$CANONICAL_EXPORT_DIR"/Open-ILS/web/opac/skin/default "$INSTALL_DIR"/var/web/opac/skin/"$skin"
+			cp -r "$LOCAL_EXPORT_DIR"/"$BRANCH"/web/opac/skin/default/* "$INSTALL_DIR"/var/web/opac/skin/"$skin"/.
+		else
+			cp -r "$INSTALL_DIR"/var/web/opac/skin/default_conifer "$INSTALL_DIR"/var/web/opac/skin/"$skin"
+			cp -r "$LOCAL_EXPORT_DIR"/"$BRANCH"/web/opac/skin/"$skin"/* "$INSTALL_DIR"/var/web/opac/skin/"$skin"/.
+		fi
+
+		for xml_link in $LINKS
+		do
+			cd "$INSTALL_DIR"/var/web/opac/skin/"$skin"/xml
+			ln -sf index.xml `basename $xml_link`
+		done
+
+		# Point at theme-specific JS, as we have our own copies
+		for munge_file in 'page_rdetail.xml' 'page_mresult.xml' 'page_rresult.xml'
+		do
+			sed -i -e "s/skin\/default_conifer\/js/skin\/$skin\/js/" "$INSTALL_DIR"/var/web/opac/skin/"$skin"/xml/"$munge_file"
+		done
+		sed -i -e "s/\/$skin\/css\/layout.css/\/$skin\\/css\/layout.css/" "$INSTALL_DIR"/var/web/opac/skin/"$skin"/xml/common/css_common.xml
+
+		sed -i -e 's/var googleBooksLink = true/var googleBooksLink = false/' "$INSTALL_DIR"/var/web/opac/skin/"$skin"/js/result_common.js
+
+		# Make global config default to grouping metarecords and formats
+		# sed -i -e 's/var SHOW_MR_DEFAULT = false/var SHOW_MR_DEFAULT = true/' "$INSTALL_DIR"/var/web/opac/common/js/config.js
+		
+	done
+}
+
+# We need the Canuck patch in place. Save us, Canuck patch!
+# This also gives us Arial 12 bold call numbers. w00t.
+copy_xul()
+{
+	cp "$LOCAL_EXPORT_DIR"/"$BRANCH"/xul/server/cat/* "$INSTALL_DIR"/var/web/xul/server/cat/.
+	cp "$LOCAL_EXPORT_DIR"/"$BRANCH"/xul/server/patron/* "$INSTALL_DIR"/var/web/xul/server/patron/.
+	cp "$LOCAL_EXPORT_DIR"/"$BRANCH"/xul/server/skin/* "$INSTALL_DIR"/var/web/xul/server/skin/.
+}
+
+# Update our circ rules
+copy_circ_rules()
+{
+	for circ_rule in `ls -1 "$LOCAL_EXPORT_DIR"/"$BRANCH"/circ/*`
+	do
+		base_circ=`basename "$circ_rule"`
+		if cmp --silent "$circ_rule" "$INSTALL_DIR"/var/circ/"$base_circ"; then 
+			echo -n
+		else
+			cp -d --backup=numbered "$circ_rule" "$INSTALL_DIR"/var/circ/.
+		fi
+	done
+}
+
+# Bring our custom templates for MARC editor and such into play
+copy_templates()
+{
+	cp "$LOCAL_EXPORT_DIR"/"$BRANCH"/templates/marc/* "$INSTALL_DIR"/var/templates/marc/.
+}
+
+# Corresponding with some configuration changes, we also have to customize some Perl modules
+# And similarly, we might want to only trigger these manually
+copy_perl_modules()
+{
+	for perl_mod in `cd "$LOCAL_EXPORT_DIR"/"$BRANCH"/src/perlmods/; find OpenILS -name "*.pm" -print`
+	do
+		if cmp "$LOCAL_EXPORT_DIR"/"$BRANCH"/src/perlmods/"$perl_mod" "$INSTALL_DIR"/lib/perl5/"$perl_mod" &> /dev/null
+		then echo -n
+		else
+			cp --backup=numbered "$LOCAL_EXPORT_DIR"/"$BRANCH"/src/perlmods/"$perl_mod" "$INSTALL_DIR"/lib/perl5/"$perl_mod"
+			osrf_ctl.sh -a restart_perl > /dev/null
+		fi
+	done
+
+}
+
+# Not sure if we really want to do this in an automated fashion, because it would mean:
+# 1. restarting Perl and possibly C services to pick up the new configuration file(s)
+# 2. running autogen.sh to generate updates for fm_IDL.xml
+# And it could potentially break the whole thing.
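+# For reference, the manual follow-up after copying new config files would look
+# roughly like this (illustrative commands; adjust for the local install):
+#   osrf_ctl.sh -l -a restart_all
+#   /openils/bin/autogen.sh -c /openils/conf/opensrf_core.xml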
+copy_conf_files()
+{
+	for conf in `ls -1 "$LOCAL_EXPORT_DIR"/"$BRANCH"/conf/`
+	do
+		if cmp "$LOCAL_EXPORT_DIR"/"$BRANCH"/conf/"$conf" "$INSTALL_DIR"/conf/"$conf" &> /dev/null
+		then echo -n
+		else
+			cp --backup=numbered "$LOCAL_EXPORT_DIR"/"$BRANCH"/conf/"$conf" "$INSTALL_DIR"/conf/.
+		fi
+	done
+}
+
+# Here end the functions; let the fun begin
+
+update_repository
+copy_web_common
+copy_opac_skins
+copy_circ_rules
+copy_xul
+#copy_perl_modules
+#copy_conf_files
diff --git a/tools/patron-load/lu_student_data.pl b/tools/patron-load/lu_student_data.pl
new file mode 100644
index 0000000000..33d5dffb1c
--- /dev/null
+++ b/tools/patron-load/lu_student_data.pl
@@ -0,0 +1,166 @@
+#!/usr/bin/perl
+use strict;
+use warnings;
+
+my $lsd = 'scratchpad.lu_student_data';
+my $lda = 'scratchpad.lu_deleted_accounts'; 
+my $lec = 'scratchpad.lu_email_changes';
+
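+# Compute a mod-10 check digit for a barcode: digits in odd positions (counting
+# from the left, starting at 1) are doubled, 9 is subtracted from any doubled
+# value of 10 or more, and the returned digit brings the running total up to
+# the next multiple of 10.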
+sub mod10_checksum {
+    my $barcode = shift;
+    my $total = 0;
+    my $position = 0;
+    foreach my $digit (split('', $barcode)) {
+        $position++;
+        if ($position % 2) {
+            # Double it
+            $digit *= 2;
+            # If less than 10, add to the total
+            if ($digit < 10) {
+                $total += $digit;
+            } else {
+                $total += $digit - 9;
+            }
+        } else {
+            $total += $digit;
+        }
+    }
+    my $rem = $total % 10;
+    if ($rem) {
+        return 10 - $rem;
+    }
+    return $rem;
+}
+
+# Data structure from LU CTS:
+# "STUDENTS_ID"	"STU_LAST_NAME"	"STU_L09_FIRST_NAME"	"STU_L09_CAST_PROGRAM"	"STU_CURRENT_ACAD_LEVELS"	"STU_L09_GET_LAU_EMAIL"	"STU_L09_GET_NON_LAU_EMAIL"	"PERSON_L09_PERM_ADDR_LINES"	"PERSON_L09_PERM_ZIP"	"PERSON_L09_PERM_CITY"	"PERSON_PRIMARY_LANGUAGE"	"STU_TERMS"
+
+my $line = 0;
+while (<>) {
+    if (!$line) {
+        $line++;
+        next;
+    }
+    # Remove quotes around columns
+    s/^"//;    
+    s/"$//;    
+    s/"\t"/\t/g;
+    s/"\t/\t/g;
+    s/\t"/\t/g;
+    # Represent NULL values
+    s/\t\t/\t\\N\t/g;
+    # Escape single-quotes
+    s/'/''/g;
+    chomp;
+
+    my @studata = split(/\t/);
+    my $barcode = '000070' . shift(@studata);
+    
+    print $barcode, mod10_checksum($barcode) . "\t";
+    print join("\t", @studata) . "\n";
+}
+print "\\.\n";
+
+print <<HERE;
+SELECT 'Number of users with mismatched email addresses (before): ', COUNT(*)
+    FROM actor.usr au
+        INNER JOIN $lsd lsd ON au.id = lsd.usr
+    WHERE au.email <> lsd.lu_email
+        AND lsd.lu_email IS NOT NULL
+        AND au.email NOT ILIKE '%normed.ca%'
+;
+INSERT INTO $lec (usr, before, after)
+    SELECT lsd.usr, au.email, lsd.lu_email
+    FROM actor.usr au
+        INNER JOIN $lsd lsd ON lsd.usr = au.id
+    WHERE au.email <> lsd.lu_email
+        AND lsd.lu_email IS NOT NULL
+        AND au.email NOT ILIKE '%normed.ca%'
+;
+UPDATE actor.usr SET email = lu_email
+    FROM $lsd
+    WHERE $lsd.usr = actor.usr.id
+        AND $lsd.lu_email IS NOT NULL
+        AND email NOT ILIKE '%normed.ca%'
+;
+SELECT 'Number of users with mismatched email addresses (after): ', COUNT(*)
+    FROM actor.usr au
+        INNER JOIN $lsd lsd ON au.id = lsd.usr
+    WHERE au.email <> lsd.lu_email
+        AND lsd.lu_email IS NOT NULL
+        AND au.email NOT ILIKE '%normed.ca%'
+;
+
+-- Update active state based on Datatel - for undergrad (13) and graduate students (12) only, registered at Desmarais (103)
+SELECT 'Number of active UG and GRAD students (before): ', COUNT(*)
+    FROM actor.usr
+    WHERE profile IN (12,13) AND home_ou = 103 AND active = TRUE
+;
+INSERT INTO $lda (usr) SELECT usr FROM $lsd lsd INNER JOIN actor.usr au ON au.id = lsd.usr WHERE au.home_ou = 103 AND au.profile IN (12, 13) AND term <> '2010AW';
+UPDATE actor.usr SET active = FALSE, expire_date = NOW() WHERE home_ou = 103 AND profile IN (12, 13);
+
+-- Set expiry date to Sept 30th of next year 
+UPDATE actor.usr SET active = TRUE, expire_date = (extract(year FROM NOW()) + 1 || '-09-30')::date
+    WHERE home_ou = 103 AND profile IN (12, 13)
+    AND id IN (SELECT usr FROM $lsd WHERE term = '2010AW')
+;
+SELECT 'Number of active UG and GRAD students (after): ', COUNT(*)
+    FROM actor.usr
+    WHERE profile IN (12,13) AND home_ou = 103 AND active = TRUE
+;
+
+-- Create or update language preference - currently stored as a actor.stat_cat
+SELECT 'Language preferences before:';
+SELECT au.home_ou, stat_cat_entry, COUNT(stat_cat_entry)
+    FROM actor.stat_cat_entry_usr_map INNER JOIN actor.usr au ON au.id = target_usr
+    GROUP BY au.home_ou, stat_cat_entry
+    ORDER BY 1, 2, 3 DESC
+;
+-- Clean up language inconsistencies
+UPDATE actor.stat_cat_entry_usr_map
+    SET stat_cat_entry = 'English'
+    WHERE stat_cat_entry = 'ENGLISH'
+;
+UPDATE actor.stat_cat_entry_usr_map
+    SET stat_cat_entry = 'français'
+    WHERE stat_cat_entry IN ('FRANCAIS', 'French', 'french')
+;
+INSERT INTO actor.stat_cat_entry_usr_map (stat_cat, target_usr, stat_cat_entry)
+    SELECT DISTINCT 2, usr, CASE WHEN lang = 'F' THEN 'français' ELSE 'English' END
+        FROM $lsd
+        WHERE usr NOT IN (SELECT target_usr FROM actor.stat_cat_entry_usr_map)
+;
+UPDATE actor.stat_cat_entry_usr_map SET stat_cat_entry = 'français'
+     WHERE target_usr IN (SELECT usr FROM $lsd WHERE lang = 'F')
+;
+SELECT 'Language preferences after:';
+SELECT au.home_ou, stat_cat_entry, COUNT(stat_cat_entry)
+    FROM actor.stat_cat_entry_usr_map INNER JOIN actor.usr au ON au.id = target_usr
+    GROUP BY au.home_ou, stat_cat_entry
+    ORDER BY 1, 2, 3 DESC
+;
+HERE
+
diff --git a/tools/sip_user.sql b/tools/sip_user.sql
new file mode 100644
index 0000000000..9b3759238f
--- /dev/null
+++ b/tools/sip_user.sql
@@ -0,0 +1,29 @@
+-- Basic SQL for setting up SIP client user group and associated permissions
+BEGIN;
+
+-- Create the SIP user group
+INSERT INTO permission.grp_tree (name, parent, usergroup, perm_interval, description, application_perm)
+	VALUES ('SIP-Client', 1, 't', '3 years', 'SIP server interactions', 'group_application.user.sip_client');
+
+-- Create the permissions for the SIP user group
+INSERT INTO permission.grp_perm_map (grp, depth, perm)
+	SELECT currval('permission.grp_tree_id_seq'), 0, id
+	FROM permission.perm_list
+	WHERE code IN (
+		'COPY_CHECKIN',
+		'COPY_CHECKOUT',
+		'RENEW_CIRC',
+		'VIEW_CIRCULATIONS',
+		'VIEW_COPY_CHECKOUT_HISTORY',
+		'VIEW_PERMIT_CHECKOUT',
+		'VIEW_USER',
+		'VIEW_USER_FINES_SUMMARY',
+		'VIEW_USER_TRANSACTIONS'
+	)
+;
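+
+-- A quick check of the id that was assigned to the new group (illustrative):
+-- SELECT id, name FROM permission.grp_tree WHERE name = 'SIP-Client';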
+
+-- Give local system administrators (10) the permission (135) to create a SIP-Client user
+INSERT INTO permission.grp_perm_map (grp, perm, depth)
+	VALUES (10, 135, 0);
+
+COMMIT;
-- 
2.11.0