+++ /dev/null
-#!/usr/bin/perl
-
-# Copyright (C) 2012 Georgia Public Library Service
-# Chris Sharp <csharp@georgialibraries.org>
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-#
-
-=pod
-
-This is a utility for use with the Evergreen ILS that eases the transfer
-the ownership of one user's report folders, templates, and report definitions
-to another user.
-
-Usage:
-
- ./change_reports_owner.pl
-=cut
-
-use warnings;
-use strict;
-use DBI;
-
-my $select_query = qq/SELECT u.id, u.first_given_name, u.second_given_name, u.family_name, u.email
- FROM actor.usr u
- JOIN actor.card cd on (cd.usr = u.id and cd.active)
- WHERE cd.barcode = ?/;
-my $old_barcode;
-my $new_barcode;
-my $response;
-my $db_name = "mydbname";
-my $db_host = "mydbhost";
-my $db_user = "mydbuser";
-my $db_pass = "mydbpass";
-
-print "Please provide the library card barcode for the user who currently owns the reports: ";
-chomp($old_barcode = <STDIN>);
-print "Please provide the library card barcode for the user to whom we are transferring ownership: ";
-chomp($new_barcode = <STDIN>);
-
-# connect
-my $dbh = DBI->connect("DBI:Pg:dbname=$db_name;host=$db_host", "$db_user", "$db_pass", {'RaiseError' => 1, 'AutoCommit' => 0});
-
-
-# execute SELECT query
-my $sth = $dbh->prepare("$select_query");
-$sth->execute($old_barcode);
-my ($user_id1, $first_name1, $middle_name1, $last_name1, $email1);
-
-# iterate through resultset
-# print values
-my $ref = $sth->bind_columns(\($user_id1, $first_name1, $middle_name1, $last_name1, $email1));
-while ($sth->fetch) {
- print "The old user has ID $user_id1, barcode $old_barcode and is named $first_name1 $middle_name1 $last_name1 with an email address of $email1.\n";
-}
-
-# execute SELECT query
-#$sth = $dbh->prepare("$select_query2");
-$sth->execute($new_barcode);
-my ($user_id2, $first_name2, $middle_name2, $last_name2, $email2);
-
-# iterate through resultset
-# print values
-$ref = $sth->bind_columns(\($user_id2, $first_name2, $middle_name2, $last_name2, $email2));
-while ($sth->fetch) {
- print "The new user has ID $user_id2, barcode $new_barcode and is named $first_name2 $middle_name2 $last_name2 with an email address of $email2.\n";
-}
-
-print "The email address for user $last_name2 is $email2 - is this the correct email address to use (y/n)? ";
-chomp($response = <STDIN>);
-
-if ($response eq "n") {
- print "Please enter the email address to be used: ";
- chomp(my $email2 = <STDIN>);
- print "We will use $email2 as the email address.\n";
-} elsif ($response ne "y") {
- print "Response invalid. Aborting.\n";
- exit();
-}
-
-print "We will be transferring all report templates and report definitions from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode). Is that correct (y/n)? ";
-chomp($response = <STDIN>);
-
-if ($response eq "n") {
- print "Aborting.\n";
- # clean up
- $dbh->disconnect();
- exit();
-} elsif ($response eq "y") {
- print "Beginning transfer of reports from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode)...\n";
-} else {
- print "The response is not valid. Aborting.\n";
- # clean up
- $dbh->disconnect();
- exit();
-}
-
-my @tables = qw/reporter.template_folder reporter.report_folder reporter.output_folder reporter.template reporter.report/;
-foreach my $table (@tables) {
- my $update_owner = qq/UPDATE $table SET owner = ? WHERE owner = ?/;
- $sth = $dbh->prepare("$update_owner");
- $sth->execute($user_id2, $user_id1);
- print "Setting new owner for $table.\n";
-}
-
-print "Updating reporter.schedule with new runner and replacing email addresses.\n";
-
-if (defined($email1) && defined($email2)) {
- my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = regexp_replace(email, ?, ?) WHERE runner = ? AND complete_time IS NULL/;
- $sth = $dbh->prepare("$update_schedule");
- $sth->execute($user_id2, $email1, $email2, $user_id1);
-} elsif (defined($email2)) {
- my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = email || ' ' || ? WHERE runner = ? AND complete_time IS NULL/;
- $sth = $dbh->prepare("$update_schedule");
- $sth->execute($user_id2, $email2, $user_id1);
-} else {
- my $update_schedule = qq/UPDATE reporter.schedule SET runner = ? WHERE runner = ? AND complete_time IS NULL/;
- $sth = $dbh->prepare("$update_schedule");
- $sth->execute($user_id2, $user_id1);
-}
-
-$dbh->commit;
-
-# clean up
-$dbh->disconnect();
+++ /dev/null
-#!/bin/bash
-
-# a script to generate an SQL file containing reports templates that can be
-# imported into another Evergreen server
-
-DB_USER="evergreen"
-DB_HOST="localhost"
-DB_NAME="evergreen"
-OUTFILE="templates_to_import_`date +%F`.sql"
-
-read -p "Please enter a comma-and-space-separated list of template ids for the reports you're copying (e.g., 12345, 12346, 12347 - No comma necessary if just one template): " TEMPLATES
-read -p "Please enter the id for the destination template owner on the destination server: " OWNER
-read -p "Please enter the id for the destination template folder on the destination server for the copied templates: " FOLDER
-
-read -d '' SQL <<EOF
-select 'insert into reporter.template (owner, name, description, data, folder) values ($OWNER, ''' || name || ''', ''' || description || ''', ''' || data || ''', $FOLDER);'
-from reporter.template
-where id in ($TEMPLATES);
-EOF
-
-echo "begin;" >> $OUTFILE
-psql -A -t -U "$DB_USER" -h "$DB_HOST" -c "$SQL" "$DB_NAME" >> "$OUTFILE"
-echo "commit;" >> $OUTFILE
-
+++ /dev/null
-This script makes the following assumptions (and doesn't check if these reqs are not met):
-
-*library system names don't have parens AND the word 'row' in their name
-*the first column of every line identifies what file we want to write THIS line to
-*if that item has a '-' in it, remove the contents after and including the '-'
-*there exists an 'output' directory within the current directory
-*there exists a 'csv' directory within the current directory that contains all of the .csv files you want to process
-*All of said csv files contain a line at the top containing the column headers
-*that the scripts will be run from the current directory
-*that file system is in the right state for us to write to (permissions, space, actually exists etc)
-
-you can run the python script on an individual file:
-
-#INSERT EXAMPLE
-
-or you can just run ./do_all.sh to iterate over all files in the csv folder
+++ /dev/null
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
+++ /dev/null
-#!/bin/bash
-
-for F in csv/*
-do
- ./split_fy.py "$F"
-done
+++ /dev/null
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
+++ /dev/null
-#!/usr/bin/python
-
-import os
-import sys
-
-if len(sys.argv) != 2:
- print "Usage: <scriptname> csv/blah.csv"
- quit()
-fname = sys.argv[1].split('/')[1]
-report_name = fname.split('.csv')[0]
-
-f = open ('csv/'+fname,'r')
-header = f.readline()
-lines = f.readlines()
-output=dict()
-for line in lines:
- #check for just systems e.g. STATELIB instead of the expected STATELIB-L
- tmp = line.split(',')[0]
- if tmp.find('-') == -1:
- libsys = tmp
- else:
- libsys = line.split('-')[0]
- #remove any whitespace junk that may have made it thus far:
- libsys = libsys.strip()
-
- #if there are any lines that don't look right or lack a lib name, notify us, and discard the line by
- #continuing on to the next item in the for loop
- if libsys == "":
- print "Couldn't identify library for this line:", line
- print "This was contained in:", fname
- continue
- #attempt to detect row count lines and skip to the next item if we think we got one
- #probably could have regex matched it
- if libsys.find('(') != -1 or libsys.find(')') != -1:
- if libsys.find('row') != -1:
- print "We most likely encountered a row count line:"+line+" in file:"+fname
- continue
-
-
- #check to make sure we don't try to append to a non-existant object, and then add to the list object
- if libsys not in output:
- output[libsys]=list()
- output[libsys].append(line)
-
-#iterate through the dictionary we just created, and generate the files
-for k,v in output.items():
- outfile = open('output/'+k+'-'+fname,'w')
- outfile.write(header)
- for line in v:
- outfile.write(line)
- outfile.close()
-
-
-f.close()
+++ /dev/null
-Georgia PINES IntraPINES Loans Reports
-======================================
-
-These reports were generated on TODAYS_DATE on FQDN.
-
-Please notify ADMIN_EMAIL of any problems you encounter.
+++ /dev/null
-#!/bin/bash
-#
-# Simplifying the process of running IntraPINES loans.
-# Chris Sharp <csharp@georgialibraries.org>
-#
-
-Usage () {
-echo -e "Usage:\n\t$0 [-s START_DATE] [-e END_DATE]\n\n\tDates are in YYYY-MM-DD format."
-exit;
-}
-
-while getopts s:e:h OPTIONS
-do case "$OPTIONS" in
- s) START_DATE="$OPTARG" ;;
- e) END_DATE="$OPTARG" ;;
- h) Usage ;;
- esac
-done
-
-WORK_DIR=$(pwd)
-PGUSERNAME="mydbuser"
-DBHOST="mydbhost"
-DBNAME="mydbname"
-OUTBOX="$WORK_DIR/outbox"
-
-GetDates () {
-clear
-echo "This script will generate an SQL script to run IntraPINES loans reports"
-echo "(for loans between systems not including loans between branches within"
-echo "systems)."
-echo
-read -p "Please enter the start date for the reports (in YYYY-MM-DD format): " START_DATE
-read -p "Please enter the end date for the reports (in YYYY-MM-DD format): " END_DATE
-echo
-read -p "Reports will be run between $START_DATE and $END_DATE. Is this correct (y/n)? " RESPONSE
-if [ $RESPONSE != "y" ]; then
- echo "Aborting."
- exit
-fi
-}
-
-GenerateReports () {
-SQL_TEMPLATE="intrapines_matrix_report-template.sql"
-TMP_TEMPLATE="/tmp/intrapines_matrix_report-$START_DATE-to-$END_DATE.sql"
-README="/tmp/README-intraPINES-loans-$START_DATE-to-$END_DATE.txt"
-EMAILS="recipient@example.org"
-ADMIN_EMAIL="admin-email@example.org"
-
-echo "Generating SQL template."
-cp -f "$SQL_TEMPLATE" "$TMP_TEMPLATE"
-sed -i "s^START_DATE^$START_DATE^g" "$TMP_TEMPLATE"
-sed -i "s^END_DATE^$END_DATE^g" "$TMP_TEMPLATE"
-echo "Performing SQL queries."
-psql -F\| -A -f "$TMP_TEMPLATE" -h "$DBHOST" -U "$DBNAME"
-echo "Generating README.txt."
-cp "$WORK_DIR"/README.template "$README"
-sed -i "s^FQDN^$(hostname -f)^g" "$README"
-sed -i "s^TODAYS_DATE^$(date)^g" "$README"
-sed -i "s^ADMIN_EMAIL^$ADMIN_EMAIL^g" "$README"
-}
-
-ZipReports () {
-echo "Zipping results."
-zip -jmT $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip /tmp/*$START_DATE-to-$END_DATE.csv $README
-}
-
-CleanUp () {
-# clean up
-echo "Cleaning up."
-rm -f $README $TMP_TEMPLATE
-}
-MailResults () {
-echo "IntraPINES loans report for $START_DATE to $END_DATE attached." | mutt -s "IntraPINES Loans report $START_DATE - $END_DATE" -a $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip -- $EMAILS
-}
-
-if [ -z $START_DATE ] || [ -z $END_DATE ]; then
- GetDates
-fi
-GenerateReports
-ZipReports
-CleanUp
-MailResults
-
-echo "Complete! Zipped results available at $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip"
+++ /dev/null
--- must %s/{date}/{marker}g for this to work ... dunno why :(
-\set start 'START_DATE'
-\set end 'END_DATE'
-
-\o /tmp/intrapines-facility-all-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
- from
- ( select s.shortname, count(*) as send_count
- from "action".transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
- join
- ( select r.shortname, count(*) as recv_count
- from "action".transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
- using (shortname)
- order by 1;
-
-\o /tmp/intrapines-facility-hold-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
- from
- ( select s.shortname, count(*) as send_count
- from "action".hold_transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
- join
- ( select r.shortname, count(*) as recv_count
- from "action".hold_transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
- using (shortname)
- order by 1;
-
-\o /tmp/intrapines-system-hold-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
- from
- ( select p.shortname, count(*) as send_count
- from "action".hold_transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- join actor.org_unit p on (s.parent_ou = p.id)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
- join
- ( select p.shortname, count(*) as recv_count
- from "action".hold_transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- join actor.org_unit p on (r.parent_ou = p.id)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
- using (shortname)
- order by 1;
-
-\o /tmp/intrapines-system-all-transit-totals-START_DATE-to-END_DATE.csv
-
-select s.shortname, send_count, recv_count, recv_count - send_count as delta
- from
- ( select p.shortname, count(*) as send_count
- from "action".transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- join actor.org_unit p on (s.parent_ou = p.id)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
- join
- ( select p.shortname, count(*) as recv_count
- from "action".transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- join actor.org_unit p on (r.parent_ou = p.id)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
- using (shortname)
- order by 1;
-
-\o /tmp/intrapines-facility-to-facility-hold-transit-START_DATE-to-END_DATE.csv
-
-select s.shortname as source, r.shortname as destination, count(*) as count
- from "action".hold_transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
- group by 1, 2
- order by 1,2;
-
-\o /tmp/intrapines-facility-to-facility-all-transit-START_DATE-to-END_DATE.csv
-
-select s.shortname as source, r.shortname as destination, count(*) as count
- from "action".transit_copy t
- join actor.org_unit s on (s.id = t.source)
- join actor.org_unit r on (r.id = t.dest)
- where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
- group by 1, 2
- order by 1,2;
-
-\o
-
--- /dev/null
+#!/usr/bin/perl
+
+# Copyright (C) 2012 Georgia Public Library Service
+# Chris Sharp <csharp@georgialibraries.org>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+
+=pod
+
+This is a utility for use with the Evergreen ILS that eases the transfer
+the ownership of one user's report folders, templates, and report definitions
+to another user.
+
+Usage:
+
+ ./change_reports_owner.pl
+=cut
+
+use warnings;
+use strict;
+use DBI;
+
+my $select_query = qq/SELECT u.id, u.first_given_name, u.second_given_name, u.family_name, u.email
+ FROM actor.usr u
+ JOIN actor.card cd on (cd.usr = u.id and cd.active)
+ WHERE cd.barcode = ?/;
+my $old_barcode;
+my $new_barcode;
+my $response;
+my $db_name = "mydbname";
+my $db_host = "mydbhost";
+my $db_user = "mydbuser";
+my $db_pass = "mydbpass";
+
+print "Please provide the library card barcode for the user who currently owns the reports: ";
+chomp($old_barcode = <STDIN>);
+print "Please provide the library card barcode for the user to whom we are transferring ownership: ";
+chomp($new_barcode = <STDIN>);
+
+# connect
+my $dbh = DBI->connect("DBI:Pg:dbname=$db_name;host=$db_host", "$db_user", "$db_pass", {'RaiseError' => 1, 'AutoCommit' => 0});
+
+
+# execute SELECT query
+my $sth = $dbh->prepare("$select_query");
+$sth->execute($old_barcode);
+my ($user_id1, $first_name1, $middle_name1, $last_name1, $email1);
+
+# iterate through resultset
+# print values
+my $ref = $sth->bind_columns(\($user_id1, $first_name1, $middle_name1, $last_name1, $email1));
+while ($sth->fetch) {
+ print "The old user has ID $user_id1, barcode $old_barcode and is named $first_name1 $middle_name1 $last_name1 with an email address of $email1.\n";
+}
+
+# execute SELECT query
+#$sth = $dbh->prepare("$select_query2");
+$sth->execute($new_barcode);
+my ($user_id2, $first_name2, $middle_name2, $last_name2, $email2);
+
+# iterate through resultset
+# print values
+$ref = $sth->bind_columns(\($user_id2, $first_name2, $middle_name2, $last_name2, $email2));
+while ($sth->fetch) {
+ print "The new user has ID $user_id2, barcode $new_barcode and is named $first_name2 $middle_name2 $last_name2 with an email address of $email2.\n";
+}
+
+print "The email address for user $last_name2 is $email2 - is this the correct email address to use (y/n)? ";
+chomp($response = <STDIN>);
+
+if ($response eq "n") {
+ print "Please enter the email address to be used: ";
+	chomp($email2 = <STDIN>);
+ print "We will use $email2 as the email address.\n";
+} elsif ($response ne "y") {
+ print "Response invalid. Aborting.\n";
+ exit();
+}
+
+print "We will be transferring all report templates and report definitions from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode). Is that correct (y/n)? ";
+chomp($response = <STDIN>);
+
+if ($response eq "n") {
+ print "Aborting.\n";
+ # clean up
+ $dbh->disconnect();
+ exit();
+} elsif ($response eq "y") {
+ print "Beginning transfer of reports from $first_name1 $middle_name1 $last_name1 ($old_barcode) to $first_name2 $middle_name2 $last_name2 ($new_barcode)...\n";
+} else {
+ print "The response is not valid. Aborting.\n";
+ # clean up
+ $dbh->disconnect();
+ exit();
+}
+
+my @tables = qw/reporter.template_folder reporter.report_folder reporter.output_folder reporter.template reporter.report/;
+foreach my $table (@tables) {
+ my $update_owner = qq/UPDATE $table SET owner = ? WHERE owner = ?/;
+ $sth = $dbh->prepare("$update_owner");
+ $sth->execute($user_id2, $user_id1);
+ print "Setting new owner for $table.\n";
+}
+
+print "Updating reporter.schedule with new runner and replacing email addresses.\n";
+
+if (defined($email1) && defined($email2)) {
+ my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = regexp_replace(email, ?, ?) WHERE runner = ? AND complete_time IS NULL/;
+ $sth = $dbh->prepare("$update_schedule");
+ $sth->execute($user_id2, $email1, $email2, $user_id1);
+} elsif (defined($email2)) {
+ my $update_schedule = qq/UPDATE reporter.schedule SET runner = ?, email = email || ' ' || ? WHERE runner = ? AND complete_time IS NULL/;
+ $sth = $dbh->prepare("$update_schedule");
+ $sth->execute($user_id2, $email2, $user_id1);
+} else {
+ my $update_schedule = qq/UPDATE reporter.schedule SET runner = ? WHERE runner = ? AND complete_time IS NULL/;
+ $sth = $dbh->prepare("$update_schedule");
+ $sth->execute($user_id2, $user_id1);
+}
+
+$dbh->commit;
+
+# clean up
+$dbh->disconnect();
--- /dev/null
+#!/bin/bash
+
+# a script to generate an SQL file containing reports templates that can be
+# imported into another Evergreen server
+
+DB_USER="evergreen"
+DB_HOST="localhost"
+DB_NAME="evergreen"
+OUTFILE="templates_to_import_`date +%F`.sql"
+
+read -p "Please enter a comma-and-space-separated list of template ids for the reports you're copying (e.g., 12345, 12346, 12347 - No comma necessary if just one template): " TEMPLATES
+read -p "Please enter the id for the destination template owner on the destination server: " OWNER
+read -p "Please enter the id for the destination template folder on the destination server for the copied templates: " FOLDER
+
+read -d '' SQL <<EOF
+select 'insert into reporter.template (owner, name, description, data, folder) values ($OWNER, ''' || name || ''', ''' || description || ''', ''' || data || ''', $FOLDER);'
+from reporter.template
+where id in ($TEMPLATES);
+EOF
+
+echo "begin;" >> $OUTFILE
+psql -A -t -U "$DB_USER" -h "$DB_HOST" -c "$SQL" "$DB_NAME" >> "$OUTFILE"
+echo "commit;" >> $OUTFILE
+
--- /dev/null
+This script makes the following assumptions (and doesn't check whether these requirements are met):
+
+*library system names don't have parens AND the word 'row' in their name
+*the first column of every line identifies what file we want to write THIS line to
+*if that item has a '-' in it, remove the contents after and including the '-'
+*there exists an 'output' directory within the current directory
+*there exists a 'csv' directory within the current directory that contains all of the .csv files you want to process
+*All of said csv files contain a line at the top containing the column headers
+*that the scripts will be run from the current directory
+*that file system is in the right state for us to write to (permissions, space, actually exists etc)
+
+you can run the python script on an individual file:
+
+#INSERT EXAMPLE
+
+or you can just run ./do_all.sh to iterate over all files in the csv folder
--- /dev/null
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
--- /dev/null
+#!/bin/bash
+
+for F in csv/*
+do
+ ./split_fy.py "$F"
+done
--- /dev/null
+# Ignore everything in this directory
+*
+# Except this file
+!.gitignore
--- /dev/null
+#!/usr/bin/python
+
+import os
+import sys
+
+if len(sys.argv) != 2:
+ print "Usage: <scriptname> csv/blah.csv"
+ quit()
+fname = sys.argv[1].split('/')[1]
+report_name = fname.split('.csv')[0]
+
+f = open ('csv/'+fname,'r')
+header = f.readline()
+lines = f.readlines()
+output=dict()
+for line in lines:
+ #check for just systems e.g. STATELIB instead of the expected STATELIB-L
+ tmp = line.split(',')[0]
+ if tmp.find('-') == -1:
+ libsys = tmp
+ else:
+ libsys = line.split('-')[0]
+ #remove any whitespace junk that may have made it thus far:
+ libsys = libsys.strip()
+
+ #if there are any lines that don't look right or lack a lib name, notify us, and discard the line by
+ #continuing on to the next item in the for loop
+ if libsys == "":
+ print "Couldn't identify library for this line:", line
+ print "This was contained in:", fname
+ continue
+ #attempt to detect row count lines and skip to the next item if we think we got one
+ #probably could have regex matched it
+ if libsys.find('(') != -1 or libsys.find(')') != -1:
+ if libsys.find('row') != -1:
+ print "We most likely encountered a row count line:"+line+" in file:"+fname
+ continue
+
+
+ #check to make sure we don't try to append to a non-existant object, and then add to the list object
+ if libsys not in output:
+ output[libsys]=list()
+ output[libsys].append(line)
+
+#iterate through the dictionary we just created, and generate the files
+for k,v in output.items():
+ outfile = open('output/'+k+'-'+fname,'w')
+ outfile.write(header)
+ for line in v:
+ outfile.write(line)
+ outfile.close()
+
+
+f.close()
--- /dev/null
+Georgia PINES IntraPINES Loans Reports
+======================================
+
+These reports were generated on TODAYS_DATE on FQDN.
+
+Please notify ADMIN_EMAIL of any problems you encounter.
--- /dev/null
+#!/bin/bash
+#
+# Simplifying the process of running IntraPINES loans.
+# Chris Sharp <csharp@georgialibraries.org>
+#
+
+Usage () {
+echo -e "Usage:\n\t$0 [-s START_DATE] [-e END_DATE]\n\n\tDates are in YYYY-MM-DD format."
+exit;
+}
+
+while getopts s:e:h OPTIONS
+do case "$OPTIONS" in
+ s) START_DATE="$OPTARG" ;;
+ e) END_DATE="$OPTARG" ;;
+ h) Usage ;;
+ esac
+done
+
+WORK_DIR=$(pwd)
+PGUSERNAME="mydbuser"
+DBHOST="mydbhost"
+DBNAME="mydbname"
+OUTBOX="$WORK_DIR/outbox"
+
+GetDates () {
+clear
+echo "This script will generate an SQL script to run IntraPINES loans reports"
+echo "(for loans between systems not including loans between branches within"
+echo "systems)."
+echo
+read -p "Please enter the start date for the reports (in YYYY-MM-DD format): " START_DATE
+read -p "Please enter the end date for the reports (in YYYY-MM-DD format): " END_DATE
+echo
+read -p "Reports will be run between $START_DATE and $END_DATE. Is this correct (y/n)? " RESPONSE
+if [ "$RESPONSE" != "y" ]; then
+ echo "Aborting."
+ exit
+fi
+}
+
+GenerateReports () {
+SQL_TEMPLATE="intrapines_matrix_report-template.sql"
+TMP_TEMPLATE="/tmp/intrapines_matrix_report-$START_DATE-to-$END_DATE.sql"
+README="/tmp/README-intraPINES-loans-$START_DATE-to-$END_DATE.txt"
+EMAILS="recipient@example.org"
+ADMIN_EMAIL="admin-email@example.org"
+
+echo "Generating SQL template."
+cp -f "$SQL_TEMPLATE" "$TMP_TEMPLATE"
+sed -i "s^START_DATE^$START_DATE^g" "$TMP_TEMPLATE"
+sed -i "s^END_DATE^$END_DATE^g" "$TMP_TEMPLATE"
+echo "Performing SQL queries."
+psql -F\| -A -f "$TMP_TEMPLATE" -h "$DBHOST" -U "$PGUSERNAME" "$DBNAME"
+echo "Generating README.txt."
+cp "$WORK_DIR"/README.template "$README"
+sed -i "s^FQDN^$(hostname -f)^g" "$README"
+sed -i "s^TODAYS_DATE^$(date)^g" "$README"
+sed -i "s^ADMIN_EMAIL^$ADMIN_EMAIL^g" "$README"
+}
+
+ZipReports () {
+echo "Zipping results."
+zip -jmT $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip /tmp/*$START_DATE-to-$END_DATE.csv $README
+}
+
+CleanUp () {
+# clean up
+echo "Cleaning up."
+rm -f $README $TMP_TEMPLATE
+}
+MailResults () {
+echo "IntraPINES loans report for $START_DATE to $END_DATE attached." | mutt -s "IntraPINES Loans report $START_DATE - $END_DATE" -a $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip -- $EMAILS
+}
+
+if [ -z $START_DATE ] || [ -z $END_DATE ]; then
+ GetDates
+fi
+GenerateReports
+ZipReports
+CleanUp
+MailResults
+
+echo "Complete! Zipped results available at $OUTBOX/intrapines-reports-$START_DATE-to-$END_DATE.zip"
--- /dev/null
+-- must %s/{date}/{marker}g for this to work ... dunno why :(
+\set start 'START_DATE'
+\set end 'END_DATE'
+
+\o /tmp/intrapines-facility-all-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+ from
+ ( select s.shortname, count(*) as send_count
+ from "action".transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+ join
+ ( select r.shortname, count(*) as recv_count
+ from "action".transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+ using (shortname)
+ order by 1;
+
+\o /tmp/intrapines-facility-hold-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+ from
+ ( select s.shortname, count(*) as send_count
+ from "action".hold_transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+ join
+ ( select r.shortname, count(*) as recv_count
+ from "action".hold_transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+ using (shortname)
+ order by 1;
+
+\o /tmp/intrapines-system-hold-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+ from
+ ( select p.shortname, count(*) as send_count
+ from "action".hold_transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ join actor.org_unit p on (s.parent_ou = p.id)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+ join
+ ( select p.shortname, count(*) as recv_count
+ from "action".hold_transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ join actor.org_unit p on (r.parent_ou = p.id)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+ using (shortname)
+ order by 1;
+
+\o /tmp/intrapines-system-all-transit-totals-START_DATE-to-END_DATE.csv
+
+select s.shortname, send_count, recv_count, recv_count - send_count as delta
+ from
+ ( select p.shortname, count(*) as send_count
+ from "action".transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ join actor.org_unit p on (s.parent_ou = p.id)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) s
+ join
+ ( select p.shortname, count(*) as recv_count
+ from "action".transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ join actor.org_unit p on (r.parent_ou = p.id)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE' group by 1) r
+ using (shortname)
+ order by 1;
+
+\o /tmp/intrapines-facility-to-facility-hold-transit-START_DATE-to-END_DATE.csv
+
+select s.shortname as source, r.shortname as destination, count(*) as count
+ from "action".hold_transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
+ group by 1, 2
+ order by 1,2;
+
+\o /tmp/intrapines-facility-to-facility-all-transit-START_DATE-to-END_DATE.csv
+
+select s.shortname as source, r.shortname as destination, count(*) as count
+ from "action".transit_copy t
+ join actor.org_unit s on (s.id = t.source)
+ join actor.org_unit r on (r.id = t.dest)
+ where s.parent_ou <> r.parent_ou and source_send_time between 'START_DATE' and 'END_DATE'
+ group by 1, 2
+ order by 1,2;
+
+\o
+
--- /dev/null
+#!/bin/bash
+
+# (c) Copyright 2013 Georgia Public Library Service
+# Chris Sharp <csharp@georgialibraries.org>
+#
+# A utility to ease database server administration by reconfiguring
+# which database is used for reports queries. It assumes that you're
+# running pgpool to balance the load between two servers named db02 and
+# db03, and that you have created the alternate config files inside your
+# OpenSRF configuration directory.
+#
+
+WORKING_DIR="$PWD"
+CONF_DIR="/openils/conf"
+RUNNING_CONFIG="$CONF_DIR/opensrf.xml"
+PGPOOL_CONFIG="$CONF_DIR/opensrf.xml.pgpool"
+DB02_CONFIG="$CONF_DIR/opensrf.xml.db02"
+DB03_CONFIG="$CONF_DIR/opensrf.xml.db03"
+STATE_FILE="$WORKING_DIR/current"
+OPENSRF_RESTART="/etc/init.d/opensrf restart"
+REPORTER_LOCK="/tmp/reporter-LOCK"
+REPORTS_CMD="/openils/bin/clark-kent.pl"
+
+CheckConfig () {
+for FILE in $PGPOOL_CONFIG $DB02_CONFIG $DB03_CONFIG; do
+if [ ! -e $FILE ]; then
+ echo "Required file $FILE is missing. Please create it before running this script again."
+ exit;
+fi;
+done
+}
+
+GetState () {
+if [ -e $STATE_FILE ]; then
+ STATE=$(cat $STATE_FILE)
+ echo "The reporter is currently using $STATE configuration."
+else
+ echo "The state file at $STATE_FILE does not exist, skipping..."
+fi
+}
+
+CheckReporter () {
+if [ -e $REPORTER_LOCK ]; then
+ echo "Reporter lock file is in place, which may mean the reporter is running."
+ echo "Please stop the reporter process and ensure no reports are running before"
+ echo "running this script again."
+ exit;
+fi
+}
+
+StartReporter () {
+echo "Attempting to start the reporting process..."
+su - opensrf -c /bin/bash -c "$REPORTS_CMD -d -c $REPORTS_COUNT"
+if [ "$(pidof 'Clark Kent, waiting for trouble')" ]; then
+ echo "Reporting process appears to have started successfully."
+else
+ echo "Looks like something went wrong. Please start the reporter process manually."
+ exit;
+fi
+}
+
+CheckConfig
+CheckReporter
+echo "This utility changes which database servers are used to run reports queries."
+echo
+GetState
+echo
+echo "Configuration Options:"
+echo
+echo -e "\t1) PgPool Configuration (load is balanced between db02 and db03)"
+echo -e "\t2) db02 only"
+echo -e "\t3) db03 only"
+echo
+read -p "Please select the desired configuration: " CHOICE
+
+if [ $CHOICE = "1" ]; then
+ echo "Selecting PgPool configuration..."
+ cp -v $PGPOOL_CONFIG $RUNNING_CONFIG
+ REPORTS_COUNT="12"
+ echo "Restarting services to activate new configuration..."
+ $OPENSRF_RESTART
+ StartReporter
+ echo "pgpool" > $STATE_FILE
+elif [ $CHOICE = "2" ]; then
+ echo "Selecting db02 configuration..."
+ cp -v $DB02_CONFIG $RUNNING_CONFIG
+ REPORTS_COUNT="8"
+ echo "Restarting services to activate new configuration..."
+ $OPENSRF_RESTART
+ StartReporter
+ echo "db02" > $STATE_FILE
+elif [ $CHOICE = "3" ]; then
+ echo "Selecting db03 configuration..."
+ cp -v $DB03_CONFIG $RUNNING_CONFIG
+ REPORTS_COUNT="8"
+ echo "Restarting services to activate new configuration..."
+ $OPENSRF_RESTART
+ StartReporter
+ echo "db03" > $STATE_FILE
+else
+ echo "Invalid response. Aborting."
+ exit;
+fi
+
+
+++ /dev/null
-#!/bin/bash
-
-# (c) Copyright 2013 Georgia Public Library Service
-# Chris Sharp <csharp@georgialibraries.org
-#
-# A utility to ease database server administration by reconfiguring
-# which database is used for reports queries. It assumes that you're
-# running pgpool to balance the load between two servers named db02 and
-# db03, and that you have created the alternate config files inside your
-# OpenSRF configuration directory.
-#
-
-WORKING_DIR="$PWD"
-CONF_DIR="/openils/conf"
-RUNNING_CONFIG="$CONF_DIR/opensrf.xml"
-PGPOOL_CONFIG="$CONF_DIR/opensrf.xml.pgpool"
-DB02_CONFIG="$CONF_DIR/opensrf.xml.db02"
-DB03_CONFIG="$CONF_DIR/opensrf.xml.db03"
-STATE_FILE="$WORKING_DIR/current"
-OPENSRF_RESTART="/etc/init.d/opensrf restart"
-REPORTER_LOCK="/tmp/reporter-LOCK"
-REPORTS_CMD="/openils/bin/clark-kent.pl"
-
-CheckConfig () {
-for FILE in $PGPOOL_CONFIG $DB02_CONFIG $DB03_CONFIG; do
-if [ ! -e $FILE ]; then
- echo "Required file $FILE is missing. Please create it before running this script again."
- exit;
-fi;
-done
-}
-
-GetState () {
-if [ -e $STATE_FILE ]; then
- STATE=$(cat $STATE_FILE)
- echo "The reporter is currently using $STATE configuration."
-else
- echo "The state file at $STATE_FILE does not exist, skipping..."
-fi
-}
-
-CheckReporter () {
-if [ -e $REPORTER_LOCK ]; then
- echo "Reporter lock file is in place, which may mean the reporter is running."
- echo "Please stop the reporter process and ensure no reports are running before"
- echo "running this script again."
- exit;
-fi
-}
-
-StartReporter () {
-echo "Attempting to start the reporting process..."
-su - opensrf -c /bin/bash -c "$REPORTS_CMD -d -c $REPORTS_COUNT"
-if [ "$(pidof 'Clark Kent, waiting for trouble')" ]; then
- echo "Reporting process appears to have started successfully."
-else
- echo "Looks like something went wrong. Please start the reporter process manually."
- exit;
-fi
-}
-
-CheckConfig
-CheckReporter
-echo "This utility changes which database servers are used to run reports queries."
-echo
-GetState
-echo
-echo "Configuration Options:"
-echo
-echo -e "\t1) PgPool Configuration (load is balanced between db02 and db03)"
-echo -e "\t2) db02 only"
-echo -e "\t3) db03 only"
-echo
-read -p "Please select the desired configuration: " CHOICE
-
-if [ $CHOICE = "1" ]; then
- echo "Selecting PgPool configuration..."
- cp -v $PGPOOL_CONFIG $RUNNING_CONFIG
- REPORTS_COUNT="12"
- echo "Restarting services to activate new configuration..."
- $OPENSRF_RESTART
- StartReporter
- echo "pgpool" > $STATE_FILE
-elif [ $CHOICE = "2" ]; then
- echo "Selecting db02 configuration..."
- cp -v $DB02_CONFIG $RUNNING_CONFIG
- REPORTS_COUNT="8"
- echo "Restarting services to activate new configuration..."
- $OPENSRF_RESTART
- StartReporter
- echo "db02" > $STATE_FILE
-elif [ $CHOICE = "3" ]; then
- echo "Selecting db03 configuration..."
- cp -v $DB03_CONFIG $RUNNING_CONFIG
- REPORTS_COUNT="8"
- echo "Restarting services to activate new configuration..."
- $OPENSRF_RESTART
- StartReporter
- echo "db03" > $STATE_FILE
-else
- echo "Invalid response. Aborting."
- exit;
-fi
-
-