reorganizing files
authorChris Sharp <csharp@georgialibraries.org>
Thu, 6 Aug 2015 11:58:51 +0000 (07:58 -0400)
committerChris Sharp <csharp@georgialibraries.org>
Thu, 6 Aug 2015 11:58:51 +0000 (07:58 -0400)
23 files changed:
acq/.gitignore [new file with mode: 0644]
change_reports_owner.pl [deleted file]
copy_reports_templates.sh [deleted file]
fy-report-scripts/README [deleted file]
fy-report-scripts/csv/.gitignore [deleted file]
fy-report-scripts/doall.sh [deleted file]
fy-report-scripts/output/.gitignore [deleted file]
fy-report-scripts/split_fy.py [deleted file]
intrapines-loans/README.template [deleted file]
intrapines-loans/intrapines-loans-reports.sh [deleted file]
intrapines-loans/intrapines_matrix_report-template.sql [deleted file]
reports/change_reports_owner.pl [new file with mode: 0755]
reports/copy_reports_templates.sh [new file with mode: 0755]
reports/fy-report-scripts/README [new file with mode: 0644]
reports/fy-report-scripts/csv/.gitignore [new file with mode: 0644]
reports/fy-report-scripts/doall.sh [new file with mode: 0755]
reports/fy-report-scripts/output/.gitignore [new file with mode: 0644]
reports/fy-report-scripts/split_fy.py [new file with mode: 0755]
reports/intrapines-loans/README.template [new file with mode: 0644]
reports/intrapines-loans/intrapines-loans-reports.sh [new file with mode: 0755]
reports/intrapines-loans/intrapines_matrix_report-template.sql [new file with mode: 0644]
reports/switcheroo.sh [new file with mode: 0755]
switcheroo.sh [deleted file]

diff --git a/acq/.gitignore b/acq/.gitignore
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/change_reports_owner.pl b/change_reports_owner.pl
deleted file mode 100755 (executable)
index 758d807..0000000
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/perl
-
-# Copyright (C) 2012 Georgia Public Library Service
-# Chris Sharp <csharp@georgialibraries.org>
-#    
-#    This program is free software: you can redistribute it and/or modify
-#    it under the terms of the GNU General Public License as published by
-#    the Free Software Foundation, either version 3 of the License, or
-#    (at your option) any later version.
-#
-#    This program is distributed in the hope that it will be useful,
-#    but WITHOUT ANY WARRANTY; without even the implied warranty of
-#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#    GNU General Public License for more details.
-#
-#    You should have received a copy of the GNU General Public License
-#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
-#
-
-=pod
-
-This is a utility for use with the Evergreen ILS that eases the transfer
-the ownership of one user's report folders, templates, and report definitions 
-to another user.
-
-Usage:
-
-       ./change_reports_owner.pl
-=cut
-
-use warnings;
-use strict;
-use DBI;
-
-my $select_query = qq/SELECT u.id, u.first_given_name, u.second_given_name, u.family_name, u.email
-        FROM actor.usr u
-        JOIN actor.card cd on (cd.usr = u.id and cd.active)
-        WHERE cd.barcode = ?/;
-my $old_barcode;
-my $new_barcode;
-my $response;
-my $db_name = "mydbname";
-my $db_host = "mydbhost";
-my $db_user = "mydbuser";
-my $db_pass = "mydbpass";
-
-print "Please provide the library card barcode for the user who currently owns the reports: ";
-chomp($old_barcode = <STDIN>);
-print "Please provide the library card barcode for the user to whom we are transferring ownership: ";
-chomp($new_barcode = <STDIN>);
-
-# connect
-my $dbh = DBI->connect("DBI:Pg:dbname=$db_name;host=$db_host", "$db_user", "$db_pass", {'RaiseError' => 1, 'AutoCommit' => 0});
-
-
-# execute SELECT query
-my $sth = $dbh->prepare("$select_query");
-$sth->execute($old_barcode);
-my ($user_id1, $first_name1, $middle_name1, $last_name1, $email1);
-
-# iterate through resultset
-# print values
-my $ref = $sth->bind_columns(\($user_id1, $first_name1, $middle_name1, $last_name1, $email1));
-while ($sth->fetch) {
-    print "The old user has ID $user_id1, barcode $old_barcode and is named $first_name1 $middle_name1 $last_name1 with an email address of $email1.\n";
-}
-
-# execute SELECT query
-#$sth = $dbh->prepare("$select_query2");
-$sth->execute($new_barcode);
-my ($user_id2, $first_name2, $middle_name2, $last_name2, $email2);
-
-# iterate through resultset
-# print values
-$ref = $sth->bind_columns(\($user_id2, $first_name2, $middle_name2, $last_name2, $email2));
-while ($sth->fetch) {
-    print "The new user has ID $user_id2, barcode $new_barcode and is named $first_name2 $middle_name2 $last_name2 with an email address of $email2.\n";
-}
-
-print "The email address for user $last_name2 is $email2 - is this the correct email address to use (y/n)? ";
-chomp($response = <STDIN>);
-
-if ($response eq "n") {
-       print "Please enter the email address to be used: ";
-       chomp(my $email2 = <STDIN>);
-       print "We will use $email2 as the email address.\n";
-} elsif ($response ne "y") {
-       print "Response invalid.  Aborting.\n";
-       exit();
-}
-
-print "We will be transferring all report templates and report definitions from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode).  Is that correct (y/n)? ";
-chomp($response = <STDIN>);
-
-if ($response eq "n") {
-       print "Aborting.\n";
-       # clean up
-       $dbh->disconnect();
-       exit();
-} elsif ($response eq "y") {
-       print "Beginning transfer of reports from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode)...\n";
-} else {
-       print "The response is not valid.  Aborting.\n";
-       # clean up
-       $dbh->disconnect();
-       exit();
-}
-
-my @tables = qw/reporter.template_folder reporter.report_folder reporter.output_folder reporter.template reporter.report/;
-foreach my $table (@tables) {
-       my $update_owner = qq/UPDATE $table SET owner = ? WHERE owner = ?/;
-       $sth = $dbh->prepare("$update_owner");
-       $sth->execute($user_id2, $user_id1);
-       print "Setting new owner for $table.\n";
-}
-
-print "Updating reporter.schedule with new runner and replacing email addresses.\n";
-
-if (defined($email1) && defined($email2)) {
-       my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = regexp_replace(email, ?, ?) WHERE runner = ? AND complete_time IS NULL/;
-       $sth = $dbh->prepare("$update_schedule");
-       $sth->execute($user_id2, $email1, $email2, $user_id1);
-} elsif (defined($email2)) {
-       my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = email || ' ' || ? WHERE runner = ? AND complete_time IS NULL/;
-       $sth = $dbh->prepare("$update_schedule");
-       $sth->execute($user_id2, $email2, $user_id1);
-} else {
-       my $update_schedule = qq/UPDATE reporter.schedule SET runner = ? WHERE runner = ? AND complete_time IS NULL/;
-       $sth = $dbh->prepare("$update_schedule");
-       $sth->execute($user_id2, $user_id1);
-}
-
-$dbh->commit;
-
-# clean up
-$dbh->disconnect();
diff --git a/copy_reports_templates.sh b/copy_reports_templates.sh
deleted file mode 100755 (executable)
index 45d2ca5..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-# a script to generate an SQL file containing reports templates that can be 
-# imported into another Evergreen server
-
-DB_USER="evergreen"
-DB_HOST="localhost"
-DB_NAME="evergreen"
-OUTFILE="templates_to_import_`date +%F`.sql"
-
-read -p "Please enter a comma-and-space-separated list of template ids for the reports you're copying (e.g., 12345, 12346, 12347 - No comma necessary if just one template): " TEMPLATES
-read -p "Please enter the id for the destination template owner on the destination server: " OWNER
-read -p "Please enter the id for the destination template folder on the destination server for the copied templates: " FOLDER
-
-read -d '' SQL <<EOF
-select 'insert into reporter.template (owner, name, description, data, folder) values ($OWNER, ''' || name || ''', ''' || description || ''', ''' || data || ''', $FOLDER);'
-from reporter.template
-where id in ($TEMPLATES);
-EOF
-
-echo "begin;" >> $OUTFILE
-psql -A -t -U "$DB_USER" -h "$DB_HOST" -c "$SQL" "$DB_NAME" >> "$OUTFILE"
-echo "commit;" >> $OUTFILE
-
diff --git a/fy-report-scripts/README b/fy-report-scripts/README
deleted file mode 100644 (file)
index 7ba8d10..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-This script makes the following assumptions (and doesn't check if these reqs are not met):
-
-*library system names don't have parens AND the word 'row' in their name
-*the first column of every line identifies what file we want to write THIS line to
-*if that item has a '-' in it, remove the contents after and including the '-'
-*there exists an 'output' directory within the current directory
-*there exists a 'csv' directory within the current directory that contains all of the .csv files you want to process
-*All of said csv files contain a line at the top containing the column headers
-*that the scripts will be run from the current directory
-*that file system is in the right state for us to write to (permissions, space, actually exists etc)
-
-you can run the python script on an individual file:
-
-#INSERT EXAMPLE
-
-or you can just run ./do_all.sh to iterate over all files in the csv folder
diff --git a/fy-report-scripts/csv/.gitignore b/fy-report-scripts/csv/.gitignore
deleted file mode 100644 (file)
index 5e7d273..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
diff --git a/fy-report-scripts/doall.sh b/fy-report-scripts/doall.sh
deleted file mode 100755 (executable)
index 530b6f4..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-for F in csv/* 
-do
-       ./split_fy.py "$F"
-done
diff --git a/fy-report-scripts/output/.gitignore b/fy-report-scripts/output/.gitignore
deleted file mode 100644 (file)
index 5e7d273..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
diff --git a/fy-report-scripts/split_fy.py b/fy-report-scripts/split_fy.py
deleted file mode 100755 (executable)
index 6ae6207..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/python
-
-import os
-import sys
-
-if len(sys.argv) != 2:
-       print "Usage: <scriptname> csv/blah.csv"
-       quit()
-fname = sys.argv[1].split('/')[1]
-report_name = fname.split('.csv')[0]
-
-f = open ('csv/'+fname,'r')
-header = f.readline()
-lines = f.readlines()
-output=dict()
-for line in lines:
-       #check for just systems e.g. STATELIB instead of the expected STATELIB-L
-       tmp = line.split(',')[0]
-       if tmp.find('-') == -1:
-               libsys = tmp
-       else:
-               libsys = line.split('-')[0]
-       #remove any whitespace junk that may have made it thus far:
-       libsys = libsys.strip()
-       
-       #if there are any lines that don't look right or lack a lib name, notify us, and discard the line by
-       #continuing on to the next item in the for loop
-       if libsys == "":
-               print "Couldn't identify library for this line:", line
-               print "This was contained in:", fname 
-               continue
-       #attempt to detect row count lines and skip to the next item if we think we got one
-       #probably could have regex matched it
-       if libsys.find('(') != -1 or libsys.find(')') != -1:
-               if libsys.find('row') != -1:
-                       print "We most likely encountered a row count line:"+line+" in file:"+fname
-                       continue                
-
-
-       #check to make sure we don't try to append to a non-existant object, and then add to the list object
-       if libsys not in output:
-               output[libsys]=list()
-       output[libsys].append(line)
-
-#iterate through the dictionary we just created, and generate the files
-for k,v in output.items():
-       outfile = open('output/'+k+'-'+fname,'w')
-       outfile.write(header)
-       for line in v:
-               outfile.write(line)
-       outfile.close()
-
-
-f.close()
diff --git a/intrapines-loans/README.template b/intrapines-loans/README.template
deleted file mode 100644 (file)
index e3c10b1..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
-Georgia PINES IntraPINES Loans Reports
-======================================
-
-These reports were generated on TODAYS_DATE on FQDN.
-
-Please notify ADMIN_EMAIL of any problems you encounter.
diff --git a/intrapines-loans/intrapines-loans-reports.sh b/intrapines-loans/intrapines-loans-reports.sh
deleted file mode 100755 (executable)
index 82fd49e..0000000
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/bin/bash
-#
-# Simplifying the process of running IntraPINES loans.
-# Chris Sharp <csharp@georgialibraries.org>
-#
-
-Usage () {
-echo -e "Usage:\n\t$0 [-s START_DATE] [-e END_DATE]\n\n\tDates are in YYYY-MM-DD format."
-exit;
-}
-
-while getopts s:e:h OPTIONS
-do     case "$OPTIONS" in
-       s)      START_DATE="$OPTARG" ;;
-       e)      END_DATE="$OPTARG" ;;
-       h)  Usage ;;
-       esac
-done
-
-WORK_DIR=$(pwd)
-PGUSERNAME="mydbuser"
-DBHOST="mydbhost"
-DBNAME="mydbname"
-OUTBOX="$WORK_DIR/outbox"
-
-GetDates () {
-clear
-echo "This script will generate an SQL script to run IntraPINES loans reports"
-echo "(for loans between systems not including loans between branches within"
-echo "systems)."
-echo
-read -p "Please enter the start date for the reports (in YYYY-MM-DD format): " START_DATE
-read -p "Please enter the end date for the reports (in YYYY-MM-DD format): " END_DATE
-echo
-read -p "Reports will be run between $START_DATE and $END_DATE.  Is this correct (y/n)? " RESPONSE
-if [ $RESPONSE != "y" ]; then
-       echo "Aborting."
-       exit
-fi
-}
-
-GenerateReports () {
-SQL_TEMPLATE="intrapines_matrix_report-template.sql"
-TMP_TEMPLATE="/tmp/intrapines_matrix_report-$START_DATE-to-$END_DATE.sql"
-README="/tmp/README-intraPINES-loans-$START_DATE-to-$END_DATE.txt"
-EMAILS="recipient@example.org"
-ADMIN_EMAIL="admin-email@example.org"
-
-echo "Generating SQL template."
-cp -f "$SQL_TEMPLATE" "$TMP_TEMPLATE"
-sed -i "s^START_DATE^$START_DATE^g" "$TMP_TEMPLATE"
-sed -i "s^END_DATE^$END_DATE^g" "$TMP_TEMPLATE"
-echo "Performing SQL queries."
-psql -F\| -A -f "$TMP_TEMPLATE" -h "$DBHOST" -U "$DBNAME"
-echo "Generating README.txt."
-cp "$WORK_DIR"/README.template "$README"
-sed -i "s^FQDN^$(hostname -f)^g" "$README"
-sed -i "s^TODAYS_DATE^$(date)^g" "$README"
-sed -i "s^ADMIN_EMAIL^$ADMIN_EMAIL^g" "$README"
-}
-
-ZipReports () {
-echo "Zipping results."
-zip -jmT $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip /tmp/*$START_DATE-to-$END_DATE.csv $README
-}
-
-CleanUp () {
-# clean up
-echo "Cleaning up."
-rm -f $README $TMP_TEMPLATE
-}
-MailResults () {
-echo "IntraPINES loans report for $START_DATE to $END_DATE attached." | mutt -s "IntraPINES Loans report $START_DATE - $END_DATE" -a $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip -- $EMAILS
-}
-
-if [ -z $START_DATE ] || [ -z $END_DATE ]; then
-       GetDates
-fi
-GenerateReports
-ZipReports
-CleanUp
-MailResults
-
-echo "Complete! Zipped results available at $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip"
diff --git a/intrapines-loans/intrapines_matrix_report-template.sql b/intrapines-loans/intrapines_matrix_report-template.sql
deleted file mode 100644 (file)
index 739669c..0000000
+++ /dev/null
@@ -1,102 +0,0 @@
--- must %s/{date}/{marker}g for this to work ... dunno why :(
-\set start 'START_DATE'
-\set end 'END_DATE'
-
-\o /tmp/intrapines-facility-all-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
-  from
-        ( select  s.shortname, count(*) as send_count
-            from  "action".transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
-        join 
-        ( select  r.shortname, count(*) as recv_count
-            from  "action".transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
-        using (shortname)
-  order by 1;
-
-\o /tmp/intrapines-facility-hold-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
-  from
-        ( select  s.shortname, count(*) as send_count
-            from  "action".hold_transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
-        join 
-        ( select  r.shortname, count(*) as recv_count
-            from  "action".hold_transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
-        using (shortname)
-  order by 1;
-
-\o /tmp/intrapines-system-hold-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
-  from
-        ( select  p.shortname, count(*) as send_count
-            from  "action".hold_transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-                  join actor.org_unit p on (s.parent_ou = p.id)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
-        join 
-        ( select  p.shortname, count(*) as recv_count
-            from  "action".hold_transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-                  join actor.org_unit p on (r.parent_ou = p.id)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
-        using (shortname)
-  order by 1;
-
-\o /tmp/intrapines-system-all-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
-  from
-        ( select  p.shortname, count(*) as send_count
-            from  "action".transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-                  join actor.org_unit p on (s.parent_ou = p.id)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
-        join 
-        ( select  p.shortname, count(*) as recv_count
-            from  "action".transit_copy t
-                  join actor.org_unit s on (s.id = t.source)
-                  join actor.org_unit r on (r.id = t.dest)
-                  join actor.org_unit p on (r.parent_ou = p.id)
-            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
-        using (shortname)
-  order by 1;
-
-\o /tmp/intrapines-facility-to-facility-hold-transit-START_DATE-to-END_DATE.csv
-
-select  s.shortname as source, r.shortname as destination, count(*) as count
-  from  "action".hold_transit_copy t
-       join actor.org_unit s on (s.id = t.source)
-       join actor.org_unit r on (r.id = t.dest)
-  where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
-  group by 1, 2
-  order by 1,2;
-
-\o /tmp/intrapines-facility-to-facility-all-transit-START_DATE-to-END_DATE.csv
-
-select  s.shortname as source, r.shortname as destination, count(*) as count
-  from  "action".transit_copy t
-       join actor.org_unit s on (s.id = t.source)
-       join actor.org_unit r on (r.id = t.dest)
-  where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
-  group by 1, 2
-  order by 1,2;
-
-\o
-
diff --git a/reports/change_reports_owner.pl b/reports/change_reports_owner.pl
new file mode 100755 (executable)
index 0000000..758d807
--- /dev/null
@@ -0,0 +1,136 @@
+#!/usr/bin/perl
+
+# Copyright (C) 2012 Georgia Public Library Service
+# Chris Sharp <csharp@georgialibraries.org>
+#    
+#    This program is free software: you can redistribute it and/or modify
+#    it under the terms of the GNU General Public License as published by
+#    the Free Software Foundation, either version 3 of the License, or
+#    (at your option) any later version.
+#
+#    This program is distributed in the hope that it will be useful,
+#    but WITHOUT ANY WARRANTY; without even the implied warranty of
+#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+#    GNU General Public License for more details.
+#
+#    You should have received a copy of the GNU General Public License
+#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+#
+
+=pod
+
+This is a utility for use with the Evergreen ILS that eases the transfer
+the ownership of one user's report folders, templates, and report definitions 
+to another user.
+
+Usage:
+
+       ./change_reports_owner.pl
+=cut
+
+use warnings;
+use strict;
+use DBI;
+
+my $select_query = qq/SELECT u.id, u.first_given_name, u.second_given_name, u.family_name, u.email
+        FROM actor.usr u
+        JOIN actor.card cd on (cd.usr = u.id and cd.active)
+        WHERE cd.barcode = ?/;
+my $old_barcode;
+my $new_barcode;
+my $response;
+my $db_name = "mydbname";
+my $db_host = "mydbhost";
+my $db_user = "mydbuser";
+my $db_pass = "mydbpass";
+
+print "Please provide the library card barcode for the user who currently owns the reports: ";
+chomp($old_barcode = <STDIN>);
+print "Please provide the library card barcode for the user to whom we are transferring ownership: ";
+chomp($new_barcode = <STDIN>);
+
+# connect
+my $dbh = DBI->connect("DBI:Pg:dbname=$db_name;host=$db_host", "$db_user", "$db_pass", {'RaiseError' => 1, 'AutoCommit' => 0});
+
+
+# execute SELECT query
+my $sth = $dbh->prepare("$select_query");
+$sth->execute($old_barcode);
+my ($user_id1, $first_name1, $middle_name1, $last_name1, $email1);
+
+# iterate through resultset
+# print values
+my $ref = $sth->bind_columns(\($user_id1, $first_name1, $middle_name1, $last_name1, $email1));
+while ($sth->fetch) {
+    print "The old user has ID $user_id1, barcode $old_barcode and is named $first_name1 $middle_name1 $last_name1 with an email address of $email1.\n";
+}
+
+# execute SELECT query
+#$sth = $dbh->prepare("$select_query2");
+$sth->execute($new_barcode);
+my ($user_id2, $first_name2, $middle_name2, $last_name2, $email2);
+
+# iterate through resultset
+# print values
+$ref = $sth->bind_columns(\($user_id2, $first_name2, $middle_name2, $last_name2, $email2));
+while ($sth->fetch) {
+    print "The new user has ID $user_id2, barcode $new_barcode and is named $first_name2 $middle_name2 $last_name2 with an email address of $email2.\n";
+}
+
+print "The email address for user $last_name2 is $email2 - is this the correct email address to use (y/n)? ";
+chomp($response = <STDIN>);
+
+if ($response eq "n") {
+       print "Please enter the email address to be used: ";
+       chomp(my $email2 = <STDIN>);
+       print "We will use $email2 as the email address.\n";
+} elsif ($response ne "y") {
+       print "Response invalid.  Aborting.\n";
+       exit();
+}
+
+print "We will be transferring all report templates and report definitions from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode).  Is that correct (y/n)? ";
+chomp($response = <STDIN>);
+
+if ($response eq "n") {
+       print "Aborting.\n";
+       # clean up
+       $dbh->disconnect();
+       exit();
+} elsif ($response eq "y") {
+       print "Beginning transfer of reports from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode)...\n";
+} else {
+       print "The response is not valid.  Aborting.\n";
+       # clean up
+       $dbh->disconnect();
+       exit();
+}
+
+my @tables = qw/reporter.template_folder reporter.report_folder reporter.output_folder reporter.template reporter.report/;
+foreach my $table (@tables) {
+       my $update_owner = qq/UPDATE $table SET owner = ? WHERE owner = ?/;
+       $sth = $dbh->prepare("$update_owner");
+       $sth->execute($user_id2, $user_id1);
+       print "Setting new owner for $table.\n";
+}
+
+print "Updating reporter.schedule with new runner and replacing email addresses.\n";
+
+if (defined($email1) && defined($email2)) {
+       my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = regexp_replace(email, ?, ?) WHERE runner = ? AND complete_time IS NULL/;
+       $sth = $dbh->prepare("$update_schedule");
+       $sth->execute($user_id2, $email1, $email2, $user_id1);
+} elsif (defined($email2)) {
+       my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = email || ' ' || ? WHERE runner = ? AND complete_time IS NULL/;
+       $sth = $dbh->prepare("$update_schedule");
+       $sth->execute($user_id2, $email2, $user_id1);
+} else {
+       my $update_schedule = qq/UPDATE reporter.schedule SET runner = ? WHERE runner = ? AND complete_time IS NULL/;
+       $sth = $dbh->prepare("$update_schedule");
+       $sth->execute($user_id2, $user_id1);
+}
+
+$dbh->commit;
+
+# clean up
+$dbh->disconnect();
diff --git a/reports/copy_reports_templates.sh b/reports/copy_reports_templates.sh
new file mode 100755 (executable)
index 0000000..45d2ca5
--- /dev/null
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+# a script to generate an SQL file containing reports templates that can be 
+# imported into another Evergreen server
+
+DB_USER="evergreen"
+DB_HOST="localhost"
+DB_NAME="evergreen"
+OUTFILE="templates_to_import_`date +%F`.sql"
+
+read -p "Please enter a comma-and-space-separated list of template ids for the reports you're copying (e.g., 12345, 12346, 12347 - No comma necessary if just one template): " TEMPLATES
+read -p "Please enter the id for the destination template owner on the destination server: " OWNER
+read -p "Please enter the id for the destination template folder on the destination server for the copied templates: " FOLDER
+
+read -d '' SQL <<EOF
+select 'insert into reporter.template (owner, name, description, data, folder) values ($OWNER, ''' || name || ''', ''' || description || ''', ''' || data || ''', $FOLDER);'
+from reporter.template
+where id in ($TEMPLATES);
+EOF
+
+echo "begin;" >> $OUTFILE
+psql -A -t -U "$DB_USER" -h "$DB_HOST" -c "$SQL" "$DB_NAME" >> "$OUTFILE"
+echo "commit;" >> $OUTFILE
+
diff --git a/reports/fy-report-scripts/README b/reports/fy-report-scripts/README
new file mode 100644 (file)
index 0000000..7ba8d10
--- /dev/null
@@ -0,0 +1,16 @@
+This script makes the following assumptions (and doesn't check if these reqs are not met):
+
+*library system names don't have parens AND the word 'row' in their name
+*the first column of every line identifies what file we want to write THIS line to
+*if that item has a '-' in it, remove the contents after and including the '-'
+*there exists an 'output' directory within the current directory
+*there exists a 'csv' directory within the current directory that contains all of the .csv files you want to process
+*All of said csv files contain a line at the top containing the column headers
+*that the scripts will be run from the current directory
+*that file system is in the right state for us to write to (permissions, space, actually exists etc)
+
+you can run the python script on an individual file:
+
+#INSERT EXAMPLE
+
+or you can just run ./do_all.sh to iterate over all files in the csv folder
diff --git a/reports/fy-report-scripts/csv/.gitignore b/reports/fy-report-scripts/csv/.gitignore
new file mode 100644 (file)
index 0000000..5e7d273
--- /dev/null
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
diff --git a/reports/fy-report-scripts/doall.sh b/reports/fy-report-scripts/doall.sh
new file mode 100755 (executable)
index 0000000..530b6f4
--- /dev/null
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+for F in csv/* 
+do
+       ./split_fy.py "$F"
+done
diff --git a/reports/fy-report-scripts/output/.gitignore b/reports/fy-report-scripts/output/.gitignore
new file mode 100644 (file)
index 0000000..5e7d273
--- /dev/null
@@ -0,0 +1,4 @@
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
diff --git a/reports/fy-report-scripts/split_fy.py b/reports/fy-report-scripts/split_fy.py
new file mode 100755 (executable)
index 0000000..6ae6207
--- /dev/null
@@ -0,0 +1,54 @@
+#!/usr/bin/python
+
+import os
+import sys
+
+if len(sys.argv) != 2:
+       print "Usage: <scriptname> csv/blah.csv"
+       quit()
+fname = sys.argv[1].split('/')[1]
+report_name = fname.split('.csv')[0]
+
+f = open ('csv/'+fname,'r')
+header = f.readline()
+lines = f.readlines()
+output=dict()
+for line in lines:
+       #check for just systems e.g. STATELIB instead of the expected STATELIB-L
+       tmp = line.split(',')[0]
+       if tmp.find('-') == -1:
+               libsys = tmp
+       else:
+               libsys = line.split('-')[0]
+       #remove any whitespace junk that may have made it thus far:
+       libsys = libsys.strip()
+       
+       #if there are any lines that don't look right or lack a lib name, notify us, and discard the line by
+       #continuing on to the next item in the for loop
+       if libsys == "":
+               print "Couldn't identify library for this line:", line
+               print "This was contained in:", fname 
+               continue
+       #attempt to detect row count lines and skip to the next item if we think we got one
+       #probably could have regex matched it
+       if libsys.find('(') != -1 or libsys.find(')') != -1:
+               if libsys.find('row') != -1:
+                       print "We most likely encountered a row count line:"+line+" in file:"+fname
+                       continue                
+
+
+       #check to make sure we don't try to append to a non-existant object, and then add to the list object
+       if libsys not in output:
+               output[libsys]=list()
+       output[libsys].append(line)
+
+#iterate through the dictionary we just created, and generate the files
+for k,v in output.items():
+       outfile = open('output/'+k+'-'+fname,'w')
+       outfile.write(header)
+       for line in v:
+               outfile.write(line)
+       outfile.close()
+
+
+f.close()
diff --git a/reports/intrapines-loans/README.template b/reports/intrapines-loans/README.template
new file mode 100644 (file)
index 0000000..e3c10b1
--- /dev/null
@@ -0,0 +1,6 @@
+Georgia PINES IntraPINES Loans Reports
+======================================
+
+These reports were generated on TODAYS_DATE on FQDN.
+
+Please notify ADMIN_EMAIL of any problems you encounter.
diff --git a/reports/intrapines-loans/intrapines-loans-reports.sh b/reports/intrapines-loans/intrapines-loans-reports.sh
new file mode 100755 (executable)
index 0000000..82fd49e
--- /dev/null
@@ -0,0 +1,84 @@
+#!/bin/bash
+#
+# Simplifying the process of running IntraPINES loans.
+# Chris Sharp <csharp@georgialibraries.org>
+#
+
+# Print usage summary and exit.
+Usage () {
+echo -e "Usage:\n\t$0 [-s START_DATE] [-e END_DATE]\n\n\tDates are in YYYY-MM-DD format."
+exit;
+}
+
+# Parse command-line options.  Both dates are optional here; if either is
+# missing, GetDates (below) prompts for them interactively.
+while getopts s:e:h OPTIONS
+do	case "$OPTIONS" in
+	s)	START_DATE="$OPTARG" ;;
+	e)	END_DATE="$OPTARG" ;;
+	h)  Usage ;;
+	esac
+done
+
+# Site-specific configuration -- edit these placeholders for your environment.
+WORK_DIR=$(pwd)
+PGUSERNAME="mydbuser"
+DBHOST="mydbhost"
+DBNAME="mydbname"
+OUTBOX="$WORK_DIR/outbox"
+
+# Interactively prompt for the reporting date range and confirm it.
+# Sets the global START_DATE and END_DATE; exits unless the user answers "y".
+GetDates () {
+clear
+echo "This script will generate an SQL script to run IntraPINES loans reports"
+echo "(for loans between systems not including loans between branches within"
+echo "systems)."
+echo
+read -p "Please enter the start date for the reports (in YYYY-MM-DD format): " START_DATE
+read -p "Please enter the end date for the reports (in YYYY-MM-DD format): " END_DATE
+echo
+read -p "Reports will be run between $START_DATE and $END_DATE.  Is this correct (y/n)? " RESPONSE
+# NOTE(review): $RESPONSE is unquoted -- an empty reply makes this test
+# "[ != y ]", which errors; consider [ "$RESPONSE" != "y" ].
+if [ $RESPONSE != "y" ]; then
+	echo "Aborting."
+	exit
+fi
+}
+
+# Build a date-substituted copy of the SQL template, run it against the
+# reports database, and generate the README from its template.
+# Reads globals: START_DATE, END_DATE, WORK_DIR, DBHOST, DBNAME, PGUSERNAME.
+# Sets globals consumed later: TMP_TEMPLATE, README, EMAILS, ADMIN_EMAIL.
+GenerateReports () {
+SQL_TEMPLATE="intrapines_matrix_report-template.sql"
+TMP_TEMPLATE="/tmp/intrapines_matrix_report-$START_DATE-to-$END_DATE.sql"
+README="/tmp/README-intraPINES-loans-$START_DATE-to-$END_DATE.txt"
+EMAILS="recipient@example.org"
+ADMIN_EMAIL="admin-email@example.org"
+
+echo "Generating SQL template."
+# Work on a throwaway copy so the template itself is never modified.
+# '^' is the sed delimiter since the dates contain no carets.
+cp -f "$SQL_TEMPLATE" "$TMP_TEMPLATE"
+sed -i "s^START_DATE^$START_DATE^g" "$TMP_TEMPLATE"
+sed -i "s^END_DATE^$END_DATE^g" "$TMP_TEMPLATE"
+echo "Performing SQL queries."
+# BUGFIX: previously "-U \"$DBNAME\"" passed the database name as the
+# connection *user* and never selected a database ($PGUSERNAME was unused).
+# Connect as $PGUSERNAME to database $DBNAME.
+psql -F\| -A -f "$TMP_TEMPLATE" -h "$DBHOST" -U "$PGUSERNAME" -d "$DBNAME"
+echo "Generating README.txt."
+cp "$WORK_DIR"/README.template "$README"
+sed -i "s^FQDN^$(hostname -f)^g" "$README"
+sed -i "s^TODAYS_DATE^$(date)^g" "$README"
+sed -i "s^ADMIN_EMAIL^$ADMIN_EMAIL^g" "$README"
+}
+
+# Zip the CSVs written to /tmp by the SQL script, plus the README, into the
+# outbox.  -j drops paths, -m moves (deletes) the inputs, -T tests the archive.
+# Relies on $README being set by GenerateReports.
+ZipReports () {
+echo "Zipping results."
+zip -jmT $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip /tmp/*$START_DATE-to-$END_DATE.csv $README
+}
+
+# Remove the temporary README and SQL copy (globals set in GenerateReports).
+CleanUp () {
+# clean up
+echo "Cleaning up."
+rm -f $README $TMP_TEMPLATE
+}
+# Mail the zipped report.  $EMAILS is a global assigned in GenerateReports,
+# so this must run after it (which the main sequence below guarantees).
+MailResults () {
+echo "IntraPINES loans report for $START_DATE to $END_DATE attached." | mutt -s "IntraPINES Loans report $START_DATE - $END_DATE" -a $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip -- $EMAILS
+}
+
+# Main sequence: prompt for any missing dates, then generate, zip, clean, mail.
+if [ -z $START_DATE ] || [ -z $END_DATE ]; then
+	GetDates
+fi
+GenerateReports
+ZipReports
+CleanUp
+MailResults
+
+echo "Complete! Zipped results available at $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip"
diff --git a/reports/intrapines-loans/intrapines_matrix_report-template.sql b/reports/intrapines-loans/intrapines_matrix_report-template.sql
new file mode 100644 (file)
index 0000000..739669c
--- /dev/null
@@ -0,0 +1,102 @@
+-- must %s/{date}/{marker}g for this to work ... dunno why :(
+-- NOTE: the literal START_DATE / END_DATE placeholders below are replaced
+-- via sed in a temporary copy of this file by intrapines-loans-reports.sh
+-- before execution; the \set variables are not actually interpolated.
+\set start 'START_DATE'
+\set end 'END_DATE'
+
+-- Report 1: per-branch (facility) totals of ALL transits sent/received
+-- between different library systems (parent org units), with the delta.
+\o /tmp/intrapines-facility-all-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+  from
+        ( select  s.shortname, count(*) as send_count
+            from  "action".transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+        join 
+        ( select  r.shortname, count(*) as recv_count
+            from  "action".transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+        using (shortname)
+  order by 1;
+
+-- Report 2: per-branch totals restricted to HOLD transits only.
+\o /tmp/intrapines-facility-hold-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+  from
+        ( select  s.shortname, count(*) as send_count
+            from  "action".hold_transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+        join 
+        ( select  r.shortname, count(*) as recv_count
+            from  "action".hold_transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+        using (shortname)
+  order by 1;
+
+-- Report 3: HOLD transit totals rolled up to the library SYSTEM level
+-- (p = parent org unit of the sending/receiving branch).
+\o /tmp/intrapines-system-hold-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+  from
+        ( select  p.shortname, count(*) as send_count
+            from  "action".hold_transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+                  join actor.org_unit p on (s.parent_ou = p.id)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+        join 
+        ( select  p.shortname, count(*) as recv_count
+            from  "action".hold_transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+                  join actor.org_unit p on (r.parent_ou = p.id)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+        using (shortname)
+  order by 1;
+
+-- Report 4: ALL transit totals rolled up to the library SYSTEM level.
+\o /tmp/intrapines-system-all-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+  from
+        ( select  p.shortname, count(*) as send_count
+            from  "action".transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+                  join actor.org_unit p on (s.parent_ou = p.id)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+        join 
+        ( select  p.shortname, count(*) as recv_count
+            from  "action".transit_copy t
+                  join actor.org_unit s on (s.id = t.source)
+                  join actor.org_unit r on (r.id = t.dest)
+                  join actor.org_unit p on (r.parent_ou = p.id)
+            where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+        using (shortname)
+  order by 1;
+
+-- Report 5: branch-to-branch HOLD transit matrix (source, destination, count).
+\o /tmp/intrapines-facility-to-facility-hold-transit-START_DATE-to-END_DATE.csv
+
+select  s.shortname as source, r.shortname as destination, count(*) as count
+  from  "action".hold_transit_copy t
+	join actor.org_unit s on (s.id = t.source)
+	join actor.org_unit r on (r.id = t.dest)
+  where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
+  group by 1, 2
+  order by 1,2;
+
+-- Report 6: branch-to-branch ALL transit matrix.
+\o /tmp/intrapines-facility-to-facility-all-transit-START_DATE-to-END_DATE.csv
+
+select  s.shortname as source, r.shortname as destination, count(*) as count
+  from  "action".transit_copy t
+	join actor.org_unit s on (s.id = t.source)
+	join actor.org_unit r on (r.id = t.dest)
+  where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
+  group by 1, 2
+  order by 1,2;
+
+-- close the output redirection
+\o
+
diff --git a/reports/switcheroo.sh b/reports/switcheroo.sh
new file mode 100755 (executable)
index 0000000..c124752
--- /dev/null
@@ -0,0 +1,105 @@
+#!/bin/bash
+
+# (c) Copyright 2013 Georgia Public Library Service
+# Chris Sharp <csharp@georgialibraries.org
+#
+# A utility to ease database server administration by reconfiguring
+# which database is used for reports queries.  It assumes that you're
+# running pgpool to balance the load between two servers named db02 and
+# db03, and that you have created the alternate config files inside your
+# OpenSRF configuration directory.
+#
+
+# Paths to the live OpenSRF config and the three pre-built alternates.
+WORKING_DIR="$PWD"
+CONF_DIR="/openils/conf"
+RUNNING_CONFIG="$CONF_DIR/opensrf.xml"
+PGPOOL_CONFIG="$CONF_DIR/opensrf.xml.pgpool"
+DB02_CONFIG="$CONF_DIR/opensrf.xml.db02"
+DB03_CONFIG="$CONF_DIR/opensrf.xml.db03"
+# STATE_FILE records which configuration was last selected.
+STATE_FILE="$WORKING_DIR/current"
+OPENSRF_RESTART="/etc/init.d/opensrf restart"
+REPORTER_LOCK="/tmp/reporter-LOCK"
+REPORTS_CMD="/openils/bin/clark-kent.pl"
+
+# Abort unless all three alternate config files exist.
+CheckConfig () {
+for FILE in $PGPOOL_CONFIG $DB02_CONFIG $DB03_CONFIG; do
+if [ ! -e $FILE ]; then
+	echo "Required file $FILE is missing.  Please create it before running this script again."
+	exit;
+fi;
+done
+}
+
+# Report the currently-selected configuration from the state file, if any.
+GetState () {
+if [ -e $STATE_FILE ]; then
+	STATE=$(cat $STATE_FILE)
+	echo "The reporter is currently using $STATE configuration."
+else
+	echo "The state file at $STATE_FILE does not exist, skipping..."
+fi
+}
+
+# Refuse to proceed while the reporter lock file exists.
+CheckReporter () {
+if [ -e $REPORTER_LOCK ]; then
+	echo "Reporter lock file is in place, which may mean the reporter is running."
+	echo "Please stop the reporter process and ensure no reports are running before"
+	echo "running this script again."
+	exit;
+fi
+}
+
+# Start clark-kent.pl as the opensrf user with $REPORTS_COUNT workers
+# (REPORTS_COUNT is a global set by the menu branches below), then verify
+# it came up by looking for its process title.
+StartReporter () {
+echo "Attempting to start the reporting process..."
+# NOTE(review): "su - opensrf -c /bin/bash -c ..." passes -c twice; which
+# command su actually runs is implementation-dependent -- confirm intent.
+su - opensrf -c /bin/bash -c "$REPORTS_CMD -d -c $REPORTS_COUNT"
+# clark-kent.pl appears to set this process title; verify on your system.
+if [ "$(pidof 'Clark Kent, waiting for trouble')" ]; then
+	echo "Reporting process appears to have started successfully."
+else
+	echo "Looks like something went wrong.  Please start the reporter process manually."
+	exit;
+fi
+}
+
+# --- main ---
+CheckConfig
+CheckReporter
+echo "This utility changes which database servers are used to run reports queries."
+echo
+GetState
+echo
+echo "Configuration Options:"
+echo
+echo -e "\t1) PgPool Configuration (load is balanced between db02 and db03)"
+echo -e "\t2) db02 only"
+echo -e "\t3) db03 only"
+echo
+read -p "Please select the desired configuration: " CHOICE
+
+# Each branch installs the chosen config, restarts OpenSRF, restarts the
+# reporter with an appropriate worker count, and records the new state.
+# NOTE(review): $CHOICE is unquoted -- an empty reply breaks the test;
+# consider [ "$CHOICE" = "1" ].
+if [ $CHOICE = "1" ]; then
+	echo "Selecting PgPool configuration..."
+	cp -v $PGPOOL_CONFIG $RUNNING_CONFIG
+	REPORTS_COUNT="12"
+	echo "Restarting services to activate new configuration..."
+	$OPENSRF_RESTART
+	StartReporter
+	echo "pgpool" > $STATE_FILE
+elif [ $CHOICE = "2" ]; then
+	echo "Selecting db02 configuration..."
+        cp -v $DB02_CONFIG $RUNNING_CONFIG
+        REPORTS_COUNT="8"
+        echo "Restarting services to activate new configuration..."
+        $OPENSRF_RESTART
+        StartReporter
+	echo "db02" > $STATE_FILE
+elif [ $CHOICE = "3" ]; then
+		echo "Selecting db03 configuration..."
+        cp -v $DB03_CONFIG $RUNNING_CONFIG
+        REPORTS_COUNT="8"
+        echo "Restarting services to activate new configuration..."
+        $OPENSRF_RESTART
+        StartReporter
+	echo "db03" > $STATE_FILE
+else
+	echo "Invalid response. Aborting."
+	exit;
+fi 
+	
+
diff --git a/switcheroo.sh b/switcheroo.sh
deleted file mode 100755 (executable)
index c124752..0000000
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/bin/bash
-
-# (c) Copyright 2013 Georgia Public Library Service
-# Chris Sharp <csharp@georgialibraries.org
-#
-# A utility to ease database server administration by reconfiguring
-# which database is used for reports queries.  It assumes that you're
-# running pgpool to balance the load between two servers named db02 and
-# db03, and that you have created the alternate config files inside your
-# OpenSRF configuration directory.
-#
-
-WORKING_DIR="$PWD"
-CONF_DIR="/openils/conf"
-RUNNING_CONFIG="$CONF_DIR/opensrf.xml"
-PGPOOL_CONFIG="$CONF_DIR/opensrf.xml.pgpool"
-DB02_CONFIG="$CONF_DIR/opensrf.xml.db02"
-DB03_CONFIG="$CONF_DIR/opensrf.xml.db03"
-STATE_FILE="$WORKING_DIR/current"
-OPENSRF_RESTART="/etc/init.d/opensrf restart"
-REPORTER_LOCK="/tmp/reporter-LOCK"
-REPORTS_CMD="/openils/bin/clark-kent.pl"
-
-CheckConfig () {
-for FILE in $PGPOOL_CONFIG $DB02_CONFIG $DB03_CONFIG; do
-if [ ! -e $FILE ]; then
-       echo "Required file $FILE is missing.  Please create it before running this script again."
-       exit;
-fi;
-done
-}
-
-GetState () {
-if [ -e $STATE_FILE ]; then
-       STATE=$(cat $STATE_FILE)
-       echo "The reporter is currently using $STATE configuration."
-else
-       echo "The state file at $STATE_FILE does not exist, skipping..."
-fi
-}
-
-CheckReporter () {
-if [ -e $REPORTER_LOCK ]; then
-       echo "Reporter lock file is in place, which may mean the reporter is running."
-       echo "Please stop the reporter process and ensure no reports are running before"
-       echo "running this script again."
-       exit;
-fi
-}
-
-StartReporter () {
-echo "Attempting to start the reporting process..."
-su - opensrf -c /bin/bash -c "$REPORTS_CMD -d -c $REPORTS_COUNT"
-if [ "$(pidof 'Clark Kent, waiting for trouble')" ]; then
-       echo "Reporting process appears to have started successfully."
-else
-       echo "Looks like something went wrong.  Please start the reporter process manually."
-       exit;
-fi
-}
-
-CheckConfig
-CheckReporter
-echo "This utility changes which database servers are used to run reports queries."
-echo
-GetState
-echo
-echo "Configuration Options:"
-echo
-echo -e "\t1) PgPool Configuration (load is balanced between db02 and db03)"
-echo -e "\t2) db02 only"
-echo -e "\t3) db03 only"
-echo
-read -p "Please select the desired configuration: " CHOICE
-
-if [ $CHOICE = "1" ]; then
-       echo "Selecting PgPool configuration..."
-       cp -v $PGPOOL_CONFIG $RUNNING_CONFIG
-       REPORTS_COUNT="12"
-       echo "Restarting services to activate new configuration..."
-       $OPENSRF_RESTART
-       StartReporter
-       echo "pgpool" > $STATE_FILE
-elif [ $CHOICE = "2" ]; then
-       echo "Selecting db02 configuration..."
-        cp -v $DB02_CONFIG $RUNNING_CONFIG
-        REPORTS_COUNT="8"
-        echo "Restarting services to activate new configuration..."
-        $OPENSRF_RESTART
-        StartReporter
-       echo "db02" > $STATE_FILE
-elif [ $CHOICE = "3" ]; then
-               echo "Selecting db03 configuration..."
-        cp -v $DB03_CONFIG $RUNNING_CONFIG
-        REPORTS_COUNT="8"
-        echo "Restarting services to activate new configuration..."
-        $OPENSRF_RESTART
-        StartReporter
-       echo "db03" > $STATE_FILE
-else
-       echo "Invalid response. Aborting."
-       exit;
-fi 
-       
-