<field reporter:label="Processing Group Context Field" name="group_field" oils_obj:array_position="13" oils_persist:virtual="false" reporter:datatype="text"/>
<field reporter:label="Template" name="template" oils_obj:array_position="14" oils_persist:virtual="false" reporter:datatype="text"/>
<field reporter:label="Environment Entries" name="env" oils_obj:array_position="15" oils_persist:virtual="true" reporter:datatype="link"/>
+ <field reporter:label="Parameters" name="params" oils_obj:array_position="16" oils_persist:virtual="true" reporter:datatype="link"/>
</fields>
<links>
<link field="owner" reltype="has_a" key="id" map="" class="aou"/>
<link field="cleanup_success" reltype="has_a" key="id" map="" class="atclean"/>
<link field="cleanup_failure" reltype="has_a" key="id" map="" class="atclean"/>
<link field="env" reltype="has_many" key="id" map="" class="atenv"/>
+ <link field="params" reltype="has_many" key="id" map="" class="atevparam"/>
</links>
</class>
<link field="question" reltype="has_a" key="id" map="" class="asvq"/>
<link field="responses" reltype="has_many" key="answer" map="" class="asvr"/>
</links>
+ <permacrud xmlns="http://open-ils.org/spec/opensrf/IDL/permacrud/v1">
+ <actions>
+ <create permission="ADMIN_SURVEY">
+ <context link="question" jump="survey" field="owner"/>
+ </create>
+ <retrieve/>
+ <update permission="ADMIN_SURVEY">
+ <context link="question" jump="survey" field="owner"/>
+ </update>
+ <delete permission="ADMIN_SURVEY">
+ <context link="question" jump="survey" field="owner"/>
+ </delete>
+ </actions>
+ </permacrud>
</class>
<class id="ancc" controller="open-ils.cstore" oils_obj:fieldmapper="action::non_cataloged_circulation" oils_persist:tablename="action.non_cataloged_circulation" reporter:core="true" reporter:label="Non-cataloged Circulation">
<fields oils_persist:primary="id" oils_persist:sequence="action.non_cataloged_circulation_id_seq">
<services>
<service>opensrf.math</service>
- <service>open-ils.cat</service>
- <service>open-ils.supercat</service>
- <service>open-ils.search</service>
- <service>open-ils.circ</service>
<service>open-ils.actor</service>
<service>open-ils.auth</service>
- <service>open-ils.fielder</service>
+ <service>open-ils.cat</service>
+ <service>open-ils.circ</service>
<service>open-ils.collections</service>
+ <service>open-ils.fielder</service>
+ <service>open-ils.permacrud</service>
<service>open-ils.reporter</service>
+ <service>open-ils.search</service>
+ <service>open-ils.supercat</service>
+ <service>open-ils.vandelay</service>
</services>
</router>
<xs:element name="context" nillable="true">
<xs:complexType>
<xs:attribute name="link"/>
+ <xs:attribute name="jump"/>
<xs:attribute name="field" use="required"/>
<xs:attribute name="global_required"/>
</xs:complexType>
AM_CFLAGS = $(DEF_CFLAGS) -DOSRF_LOG_PARAMS -I@top_srcdir@/include/
AM_LDFLAGS = $(DEF_LDFLAGS) -L$(DBI_LIBS) -lopensrf
-bin_PROGRAMS = oils_dataloader
+bin_PROGRAMS = oils_dataloader dump_idl
oils_dataloader_SOURCES = oils_dataloader.c
oils_dataloader_LDFLAGS = $(AM_LDFLAGS) -loils_idl
oils_dataloader_DEPENDENCIES = liboils_idl.la liboils_utils.la
+dump_idl_SOURCES = dump_idl.c
+dump_idl_LDFLAGS = $(AM_LDFLAGS) -loils_idl
+dump_idl_DEPENDENCIES = liboils_idl.la liboils_utils.la
+
lib_LTLIBRARIES = liboils_idl.la liboils_utils.la oils_cstore.la oils_rstore.la oils_pcrud.la oils_auth.la
liboils_idl_la_SOURCES = oils_idl-core.c
--- /dev/null
+/*
+* C Implementation: dump_idl
+*
+* Description:
+*
+*
+* Author: Scott McKellar <scott@esilibrary.com>, (C) 2009
+*
+* Copyright: See COPYING file that comes with this distribution
+*
+*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <opensrf/string_array.h>
+#include <opensrf/osrf_hash.h>
+#include <openils/oils_idl.h>
+
+static void dump_idl( osrfHash* IDLHash );
+static void dump_class( osrfHash* class_hash, const char* class_name );
+static void dump_fields( osrfHash* field_hash );
+static void dump_one_field( osrfHash* field_hash, const char* field_name );
+static void dump_links( osrfHash* links_hash );
+static void dump_one_link( osrfHash* link_hash, const char* link_name );
+static void dump_permacrud( osrfHash* pcrud_hash );
+static void dump_action( osrfHash* action_hash, const char* action_name );
+static void dump_foreign_context( osrfHash* fc_hash );
+static void dump_fc_class( osrfHash* fc_class_hash, const char* class_name );
+static void dump_string_array(
+ osrfStringArray* sarr, const char* name, const char* indent );
+
+// Entry point.
+//
+// Usage: dump_idl [-f IDL_filename] [class ...]
+//
+// The IDL file is located by, in priority order: the -f option, the
+// OILS_IDL_FILENAME environment variable, or a hard-coded default path.
+// Any remaining arguments are class names to dump; with none, every
+// class in the IDL is dumped.
+int main( int argc, char* argv[] ) {
+ int rc = 0;
+
+ // Suppress informational messages
+ osrfLogSetLevel( OSRF_LOG_WARNING );
+
+ // Get name of IDL file, if specified on command line
+ const char* IDL_filename = NULL;
+ int filename_expected = 0; // boolean: next argument is the -f filename
+ int i;
+ for( i = 1; i < argc; ++i ) {
+ const char* arg = argv[ i ];
+ printf( "%s\n", arg ); // NOTE(review): echoes every argument; looks like debug leftover -- confirm
+ if( filename_expected ) {
+ IDL_filename = arg;
+ filename_expected = 0;
+ } else {
+ if( '-' == arg[ 0 ] && 'f' == arg[1] ) {
+ if( IDL_filename ) {
+ fprintf( stderr, "Only one IDL file may be specified\n" );
+ return 1;
+ } else {
+ if( arg[ 2 ] )
+ IDL_filename = arg + 2; // filename glued to -f, as in -ffoo.xml
+ else
+ filename_expected = 1; // filename is the next argument
+ }
+ }
+ else
+ break; // first non-option argument: start of the class-name list
+ }
+ }
+
+ if( filename_expected ) {
+ fprintf( stderr, "IDL filename expected on command line, not found\n" );
+ return 1;
+ }
+
+ // No filename? Look in the environment
+ if( !IDL_filename )
+ IDL_filename = getenv( "OILS_IDL_FILENAME" );
+
+ // Still no filename? Apply a default
+ if( !IDL_filename )
+ IDL_filename = "/openils/conf/fm_IDL.xml";
+
+ printf( "IDL filename: %s\n", IDL_filename );
+
+ osrfHash* IDL = oilsIDLInit( IDL_filename );
+ if( NULL == IDL ) {
+ // Bail out now rather than handing a NULL hash to osrfHashGet()
+ // or dump_idl() below
+ fputs( "Failed to build IDL\n", stderr );
+ return 1;
+ }
+
+ if( i >= argc )
+ // No classes specified? Dump them all
+ dump_idl( IDL );
+ else do {
+ // Dump the requested classes
+ dump_class( osrfHashGet( IDL, argv[ i ] ), argv[ i ] );
+ ++i;
+ } while( i < argc );
+
+ return rc;
+}
+
+// Dump every class found in the top-level IDL hash.
+// A NULL or empty hash is silently ignored.
+static void dump_idl( osrfHash* IDLHash ) {
+ if( NULL == IDLHash )
+ return;
+
+ if( 0 == osrfHashGetCount( IDLHash ) )
+ return;
+
+ osrfHashIterator* class_iter = osrfNewHashIterator( IDLHash );
+ osrfHash* curr_class = NULL;
+
+ // Walk the hash, dumping each class in turn
+ while( (curr_class = osrfHashIteratorNext( class_iter )) )
+ dump_class( curr_class, osrfHashIteratorKey( class_iter ) );
+
+ osrfHashIteratorFree( class_iter );
+}
+
+// Dump one class: its scalar attributes, plus its fields, links,
+// and permacrud sections via the corresponding helper functions.
+// Unknown attributes are flagged so new IDL features are noticed.
+// A NULL/empty hash or NULL name is silently ignored.
+static void dump_class( osrfHash* class_hash, const char* class_name )
+{
+ if( !class_hash || !class_name )
+ return;
+
+ if( 0 == osrfHashGetCount( class_hash ) )
+ return;
+
+ printf( "Class %s\n", class_name );
+ const char* indent = " ";
+
+ osrfHashIterator* iter = osrfNewHashIterator( class_hash );
+
+ // Dump each attribute, etc. of the class hash
+ for( ;; ) {
+ void* class_attr = osrfHashIteratorNext( iter );
+ if( class_attr ) {
+ const char* attr_name = osrfHashIteratorKey( iter );
+ if( !strcmp( attr_name, "classname" ) )
+ printf( "%s%s: %s\n", indent, attr_name, (char*) class_attr );
+ else if( !strcmp( attr_name, "fieldmapper" ) )
+ printf( "%s%s: %s\n", indent, attr_name, (char*) class_attr );
+ else if( !strcmp( attr_name, "tablename" ) )
+ printf( "%s%s: %s\n", indent, attr_name, (char*) class_attr );
+ else if( !strcmp( attr_name, "virtual" ) )
+ printf( "%s%s: %s\n", indent, attr_name, (char*) class_attr );
+ else if( !strcmp( attr_name, "controller" ) )
+ dump_string_array( (osrfStringArray*) class_attr, attr_name, indent );
+ else if( !strcmp( attr_name, "fields" ) )
+ dump_fields( (osrfHash*) class_attr );
+ else if( !strcmp( attr_name, "links" ) )
+ dump_links( (osrfHash*) class_attr );
+ else if( !strcmp( attr_name, "primarykey" ) )
+ printf( "%s%s: %s\n", indent, attr_name, (char*) class_attr );
+ else if( !strcmp( attr_name, "sequence" ) )
+ printf( "%s%s: %s\n", indent, attr_name, (char*) class_attr );
+ else if( !strcmp( attr_name, "permacrud" ) )
+ dump_permacrud( (osrfHash*) class_attr );
+ else if( !strcmp( attr_name, "source_definition" ) )
+ printf( "%s%s:\n%s\n", indent, attr_name, (char*) class_attr );
+ else
+ printf( "%s%s (unknown)\n", indent, attr_name );
+ } else
+ break;
+ }
+
+ // Fix: the iterator was leaked here; every sibling dump_* function
+ // frees its iterator, and so must we.
+ osrfHashIteratorFree( iter );
+}
+
+// Dump the "fields" section of a class: one entry per field, each
+// delegated to dump_one_field().  NULL or empty input is ignored.
+static void dump_fields( osrfHash* fields_hash ) {
+ if( NULL == fields_hash )
+ return;
+
+ if( 0 == osrfHashGetCount( fields_hash ) )
+ return;
+
+ fputs( " fields\n", stdout );
+
+ osrfHashIterator* iter = osrfNewHashIterator( fields_hash );
+ osrfHash* fields_attr = NULL;
+
+ // Dump each field
+ for( ;; ) {
+ fields_attr = osrfHashIteratorNext( iter );
+ if( fields_attr )
+ dump_one_field( fields_attr, osrfHashIteratorKey( iter ) );
+ else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Dump a single field: its name, then each of its attributes as a
+// "name: value" pair.  Field attribute values here are plain strings,
+// so no per-attribute dispatch is needed.
+static void dump_one_field( osrfHash* field_hash, const char* field_name ) {
+ if( !field_hash || !field_name )
+ return;
+
+ if( 0 == osrfHashGetCount( field_hash ) )
+ return;
+
+ printf( " %s\n", field_name );
+
+ osrfHashIterator* iter = osrfNewHashIterator( field_hash );
+ const char* field_attr = NULL;
+ const char* indent = " ";
+
+ // Dump each field attribute
+ for( ;; ) {
+ field_attr = osrfHashIteratorNext( iter );
+ if( field_attr )
+ printf( "%s%s: %s\n", indent, osrfHashIteratorKey( iter ), field_attr );
+ else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Dump the "links" section of a class: one entry per link, each
+// delegated to dump_one_link().  NULL or empty input is ignored.
+static void dump_links( osrfHash* links_hash ) {
+ if( NULL == links_hash )
+ return;
+
+ if( 0 == osrfHashGetCount( links_hash ) )
+ return;
+
+ fputs( " links\n", stdout );
+
+ osrfHashIterator* iter = osrfNewHashIterator( links_hash );
+ osrfHash* links_attr = NULL;
+
+ // Dump each link
+ for( ;; ) {
+ links_attr = osrfHashIteratorNext( iter );
+ if( links_attr )
+ dump_one_link( links_attr, osrfHashIteratorKey( iter ) );
+ else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Dump a single link: its name, then each known attribute.  Most
+// attributes are plain strings; "map" is an osrfStringArray and gets
+// its own formatter.  Unknown attributes are flagged rather than
+// silently dropped.
+static void dump_one_link( osrfHash* link_hash, const char* link_name ) {
+ if( !link_hash || !link_name )
+ return;
+
+ if( 0 == osrfHashGetCount( link_hash ) )
+ return;
+
+ printf( " %s\n", link_name );
+
+ osrfHashIterator* iter = osrfNewHashIterator( link_hash );
+ const void* link_attr = NULL;
+ const char* indent = " ";
+
+ // Dump each link attribute
+ for( ;; ) {
+ link_attr = osrfHashIteratorNext( iter );
+ if( link_attr ) {
+ const char* link_attr_name = osrfHashIteratorKey( iter );
+ if( !strcmp( link_attr_name, "reltype" ) )
+ printf( "%s%s: %s\n", indent, link_attr_name, (char*) link_attr );
+ else if( !strcmp( link_attr_name, "key" ) )
+ printf( "%s%s: %s\n", indent, link_attr_name, (char*) link_attr );
+ else if( !strcmp( link_attr_name, "class" ) )
+ printf( "%s%s: %s\n", indent, link_attr_name, (char*) link_attr );
+ else if( !strcmp( link_attr_name, "map" ) )
+ dump_string_array( (osrfStringArray*) link_attr, link_attr_name, indent );
+ else if( !strcmp( link_attr_name, "field" ) )
+ printf( "%s%s: %s\n", indent, link_attr_name, (char*) link_attr );
+ else
+ printf( "%s%s (unknown)\n", indent, link_attr_name );
+ } else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Dump the "permacrud" section of a class: one entry per CRUD action
+// (create/retrieve/update/delete), each delegated to dump_action().
+// NULL or empty input is ignored.
+static void dump_permacrud( osrfHash* pcrud_hash ) {
+ if( NULL == pcrud_hash )
+ return;
+
+ if( 0 == osrfHashGetCount( pcrud_hash ) )
+ return;
+
+ fputs( " permacrud\n", stdout );
+
+ osrfHashIterator* iter = osrfNewHashIterator( pcrud_hash );
+ osrfHash* pcrud_attr = NULL;
+
+ // Dump each action
+ for( ;; ) {
+ pcrud_attr = osrfHashIteratorNext( iter );
+ if( pcrud_attr )
+ dump_action( pcrud_attr, osrfHashIteratorKey( iter ) );
+ else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Dump one permacrud action: its permission list, global_required flag,
+// local_context list, and foreign_context subtree.  "permission" and
+// "local_context" are osrfStringArrays; "foreign_context" is a nested
+// hash handled by dump_foreign_context().  Unknown attributes are
+// flagged rather than silently dropped.
+static void dump_action( osrfHash* action_hash, const char* action_name ) {
+ if( !action_hash || !action_name )
+ return;
+
+ if( 0 == osrfHashGetCount( action_hash ) )
+ return;
+
+ printf( " %s\n", action_name );
+
+ osrfHashIterator* iter = osrfNewHashIterator( action_hash );
+ void* action_attr = NULL;
+ const char* indent = " ";
+
+ // Dump each attribute of the action
+ for( ;; ) {
+ action_attr = osrfHashIteratorNext( iter );
+ if( action_attr ) {
+ const char* attr_name = osrfHashIteratorKey( iter );
+ if( !strcmp( attr_name, "permission" ) )
+ dump_string_array( action_attr, attr_name, indent );
+ else if( !strcmp( attr_name, "global_required" ) )
+ printf( "%s%s: %s\n", indent, attr_name, (char*) action_attr );
+ else if( !strcmp( attr_name, "local_context" ) )
+ dump_string_array( action_attr, attr_name, indent );
+ else if( !strcmp( attr_name, "foreign_context" ) )
+ dump_foreign_context( action_attr );
+ else
+ printf( "%s%s (unknown)\n", indent, attr_name );
+ } else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Dump the foreign_context subtree of a permacrud action: one entry
+// per foreign class, each delegated to dump_fc_class().
+// NULL or empty input is ignored.
+static void dump_foreign_context( osrfHash* fc_hash ) {
+ if( !fc_hash )
+ return;
+
+ if( 0 == osrfHashGetCount( fc_hash ) )
+ return;
+
+ fputs( " foreign_context\n", stdout );
+
+ osrfHashIterator* iter = osrfNewHashIterator( fc_hash );
+ osrfHash* fc_attr = NULL;
+
+ // Dump each foreign context attribute
+ for( ;; ) {
+ fc_attr = osrfHashIteratorNext( iter );
+ if( fc_attr )
+ dump_fc_class( (osrfHash*) fc_attr, osrfHashIteratorKey( iter ) );
+ else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Dump one foreign-context class entry: its field, fkey, jump path,
+// and context field list.  "jump" and "context" are osrfStringArrays;
+// "field" and "fkey" are plain strings.
+// NOTE(review): unlike its siblings, this does not guard against a
+// NULL class_name before the printf -- callers pass an iterator key,
+// which is non-NULL in practice, but confirm.
+static void dump_fc_class( osrfHash* fc_class_hash, const char* class_name )
+{
+ if( ! fc_class_hash )
+ return;
+
+ if( 0 == osrfHashGetCount( fc_class_hash ) )
+ return;
+
+ printf( " %s\n", class_name );
+
+ osrfHashIterator* iter = osrfNewHashIterator( fc_class_hash );
+ void* fc_class_attr = NULL;
+ const char* indent = " ";
+
+ // Dump each foreign context attribute
+ for( ;; ) {
+ fc_class_attr = osrfHashIteratorNext( iter );
+ if( fc_class_attr ) {
+ const char* fc_class_attr_name = osrfHashIteratorKey( iter );
+ if( !strcmp( fc_class_attr_name, "field" ) )
+ printf( "%s%s: %s\n", indent, fc_class_attr_name, (const char*) fc_class_attr );
+ else if( !strcmp( fc_class_attr_name, "fkey" ) )
+ printf( "%s%s: %s\n", indent, fc_class_attr_name, (const char*) fc_class_attr );
+ else if( !strcmp( fc_class_attr_name, "jump" ) )
+ dump_string_array( (osrfStringArray*) fc_class_attr, fc_class_attr_name, indent );
+ else if( !strcmp( fc_class_attr_name, "context" ) )
+ dump_string_array( (osrfStringArray*) fc_class_attr, fc_class_attr_name, indent );
+ else
+ printf( "%s%s\n", indent, fc_class_attr_name );
+ } else
+ break;
+ }
+
+ osrfHashIteratorFree( iter );
+}
+
+// Print the name of a string array, then each member on its own
+// (further indented) line.  NULL arguments and empty arrays are
+// silently ignored.
+static void dump_string_array(
+ osrfStringArray* sarr, const char* name, const char* indent ) {
+ if( NULL == sarr || NULL == name || NULL == indent )
+ return;
+
+ const int count = sarr->size;
+
+ // Nothing to show for an empty array
+ if( count <= 0 )
+ return;
+
+ printf( "%s%s (string array)\n", indent, name );
+
+ int idx = 0;
+ while( idx < count ) {
+ printf( "%s\t%s\n", indent, osrfStringArrayGetString( sarr, idx ) );
+ ++idx;
+ }
+}
osrfHash* meta = (osrfHash*) ctx->method->userData;
osrfHash* class = osrfHashGet( meta, "class" );
char* method_type = strdup( osrfHashGet(meta, "methodtype") );
- int fetch = 1;
+ int fetch = 0;
if ( ( *method_type == 's' || *method_type == 'i' ) ) {
free(method_type);
- method_type = strdup("retrieve");
- fetch = 0; // don't go to the db for the object for retrieve-type methods
+ method_type = strdup("retrieve"); // search and id_list are equivalent to retrieve for this
+ } else if ( *method_type == 'u' || *method_type == 'd' ) {
+ fetch = 1; // MUST go to the db for the object for update and delete
}
osrfHash* pcrud = osrfHashGet( osrfHashGet(class, "permacrud"), method_type );
&err
);
- jsonObject* _fparam = jsonObjectGetIndex(_list, 0);
-
+ jsonObject* _fparam = jsonObjectClone(jsonObjectGetIndex(_list, 0));
+ jsonObjectFree(_tmp_params);
+ jsonObjectFree(_list);
+
+ osrfStringArray* jump_list = osrfHashGet(fcontext, "jump");
+
+ if (_fparam && jump_list) {
+ char* flink = NULL;
+ int k = 0;
+ while ( (flink = osrfStringArrayGetString(jump_list, k++)) && _fparam ) {
+ free(foreign_pkey_value);
+
+ osrfHash* foreign_link_hash = oilsIDLFindPath( "/%s/links/%s", _fparam->classname, flink );
+
+ foreign_pkey_value = oilsFMGetString(_fparam, flink);
+ foreign_pkey = osrfHashGet( foreign_link_hash, "key" );
+
+ _tmp_params = jsonParseStringFmt(
+ "[{\"%s\":\"%s\"}]",
+ foreign_pkey,
+ foreign_pkey_value
+ );
+
+ _list = doFieldmapperSearch(
+ ctx,
+ osrfHashGet( oilsIDL(), osrfHashGet( foreign_link_hash, "class" ) ),
+ _tmp_params,
+ &err
+ );
+
+ _fparam = jsonObjectClone(jsonObjectGetIndex(_list, 0));
+ jsonObjectFree(_tmp_params);
+ jsonObjectFree(_list);
+ }
+ }
+
+
if (!_fparam) {
- jsonObjectFree(_tmp_params);
- jsonObjectFree(_list);
growing_buffer* msg = buffer_init(128);
buffer_fadd(
return 0;
}
- jsonObjectFree(_tmp_params);
free(foreign_pkey_value);
int j = 0;
osrfStringArrayGetString(context_org_array, context_org_array->size - 1)
);
}
-
- jsonObjectFree(_list);
+
+ jsonObjectFree(_fparam);
}
osrfStringArrayFree(class_list);
osrfHashSet( _tmp_fcontext, osrfHashGet(_flink, "field"), "fkey" );
osrfHashSet( _tmp_fcontext, osrfHashGet(_flink, "key"), "field" );
+ if( (prop_str = (char*)xmlGetNoNsProp(_f, BAD_CAST "jump")) )
+ osrfHashSet( _tmp_fcontext, osrfStringArrayTokenize( prop_str, '.' ), "jump" );
+
// Tokenize field attribute into an osrfStringArray
const char * field_list = (char*) xmlGetProp(_f, BAD_CAST "field");
if( field_list )
if( (prop_str = (char*)xmlGetNoNsProp(_f, BAD_CAST "field") )) {
char* map_list = strdup( prop_str );
osrfLogDebug(OSRF_LOG_MARK,
- "Permacrud foreign context field list is %s", prop_str );
+ "Permacrud local context field list is %s", prop_str );
if (strlen( map_list ) > 0) {
char* st_tmp = NULL;
$| = 1;
-my ($config, $delim, $after) = ('SYSCONFDIR/opensrf_core.xml', ' | ');
+my ($config, $delim, $after,$deleted) = ('SYSCONFDIR/opensrf_core.xml', ' | ');
GetOptions(
"after=s" => \$after,
"boostrap=s" => \$config,
"delimiter=s" => \$delim,
+ "include-deleted" => \$deleted,
);
OpenSRF::System->bootstrap_client( config_file => $config );
my $SQL = 'SELECT id FROM biblio.record_entry WHERE id > 0';
$SQL .= " AND edit_date > '$after'" if ($after);
+$SQL .= " AND deleted IS FALSE" if (!$deleted);
my $ids = $dbh->selectcol_arrayref($SQL);
for my $id ( @$ids ) {
my $row = $dbh->selectrow_hashref( $SQL, {}, $id );
- print "$$row{id}$delim$$row{tnc_source}$delim$$row{tcn_value}$delim$$row{marc}\n";
+ print "$$row{deleted}$delim$$row{id}$delim$$row{tnc_source}$delim$$row{tcn_value}$delim$$row{marc}\n";
}
$partcount++;
}
+print '<collection xmlns="http://www.loc.gov/MARC21/slim">';
+
my $count = 0;
while (<>) {
chomp;
($partlist{tcn_value} = $values[ $partmap{tcn_value}]) =~ s/^\s*//o if ($part eq 'tcn_value');
($partlist{tcn_source} = $values[ $partmap{tcn_source}]) =~ s/^\s*//o if ($part eq 'tcn_source');
($partlist{id} = $values[ $partmap{id}]) =~ s/^\s*//o if ($part eq 'id');
+ ($partlist{deleted} = $values[ $partmap{deleted}]) =~ s/^\s*//o if ($part eq 'deleted');
$partlist{marc} = $values[ $partmap{marc}] if ($part eq 'marc');
$partlist{tcn_value} =~ s/\s*$//o if ($part eq 'tcn_value');
}
}
- if ($set_as_deleted) {
+ if ($set_as_deleted && $partlist{deleted} eq 't') {
my $leader = $r->leader();
if (length($leader)>4) {
substr($leader,5,1,"d");
my $x = $r->as_xml_record;
$x =~ s/\n//gso;
+ $x =~ s/^<[^>]>//o;
print $x."\n";
$count++;
print STDERR "\r$count" unless ($quiet || $count % 100);
};
}
+print '</collection>';
--- /dev/null
+# Built-in cleanup handlers for Action/Trigger event definitions.
+# These subs are resolved via ModRunner's builtin-handler escape hatch
+# rather than loaded as standalone modules; names match the seed data
+# in action_trigger.cleanup (including the "fourty_two" spelling).
+package OpenILS::Application::Trigger::Cleanup;
+sub fourty_two { return 42 }   # test handler: the answer to everything
+sub NOOP_True { return 1 }     # cleanup that always succeeds
+sub NOOP_False { return 0 }    # cleanup that always fails
+1;
--- /dev/null
+# Built-in collector handlers for Action/Trigger environment entries.
+# Resolved via ModRunner's builtin-handler escape hatch; names match
+# the seed data in action_trigger.collector.
+package OpenILS::Application::Trigger::Collector;
+sub fourty_two { return 42 }   # test handler: the answer to everything
+1;
--- /dev/null
+package OpenILS::Application::Trigger::Event;
+use OpenSRF::EX qw/:try/;
+
+use OpenSRF::Utils::Logger qw/:level/;
+
+use OpenILS::Utils::Fieldmapper;
+use OpenILS::Utils::CStoreEditor q/:funcs/;
+use OpenILS::Application::Trigger::ModRunner;
+
+my $log = 'OpenSRF::Utils::Logger';
+
+# Constructor.  Accepts an optional event id, creates a CStoreEditor,
+# and delegates the rest of the setup to init().
+sub new {
+ my $class = shift;
+ my $id = shift;
+ $class = ref($class) || $class;
+
+ my $self = bless { id => $id, editor => new_editor() } => $class;
+
+ # NOTE(review): init() is invoked with no argument, so it depends on
+ # the id stored in the hash above -- confirm this is intentional.
+ return $self->init()
+}
+
+# Initialize the event object: retrieve the atev row (with its event
+# definition and hook fleshed), then retrieve the hook's target object
+# via the appropriate CStoreEditor method.  Returns $self; with no id
+# set, returns a skeleton object untouched.
+sub init {
+ my $self = shift;
+ my $id = shift;
+
+ return $self if ($self->event);   # already initialized
+
+ $self->id( $id );
+ $self->environment( {} );
+
+ return $self if (!$self->id);
+
+ $self->event(
+ $self->editor->retrieve_action_trigger_event([
+ $self->id, {
+ flesh => 2,
+ flesh_fields => {
+ atev => [ 'event_def' ],
+ atevdef => [ 'hook' ]
+ }
+ }
+ ])
+ );
+
+ my $class = $self->_fm_class_by_hint( $self->event->event_def->hook->core_type );
+
+ # Build the CStoreEditor retrieve method from the Fieldmapper class,
+ # e.g. Fieldmapper::action::survey::response => retrieve_action_survey_response
+ my $meth = "retrieve_" . $class;   # fix: was "retreive_", no such editor method
+ $meth =~ s/Fieldmapper:://;
+ $meth =~ s/::/_/g;                 # fix: /g, class names may nest more than one level
+
+ $self->target( $self->editor->$meth( $self->event->target ) );
+
+ return $self;
+}
+
+# Run the cleanup handler for this event, but only after react() has
+# run (i.e. $self->reacted is defined).  Picks cleanup_success or
+# cleanup_failure from the event definition based on the reaction's
+# outcome.  State transitions: cleaning -> complete, or error.
+sub cleanup {
+ my $self = shift;
+
+ if (defined $self->reacted) {
+ $self->update_state( 'cleaning') || die 'Unable to update event state';
+ try {
+ # choose the success or failure cleanup module per the reaction result
+ my $cleanup = $self->reacted ? $self->event->event_def->cleanup_success : $self->event->event_def->cleanup_failure;
+ $self->cleanedup(
+ OpenILS::Application::Trigger::ModRunner::Cleanup
+ ->new( $cleanup, $self->environment )
+ ->run
+ ->final_result
+ );
+ } otherwise {
+ $log->error( shift() );
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ };
+
+ if ($self->cleanedup) {
+ $self->update_state( 'complete' ) || die 'Unable to update event state';
+ } else {
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ }
+
+ } else {
+ # react() never ran (or was skipped); nothing to clean up
+ $self->{cleanedup} = undef;
+ }
+ return $self;
+}
+
+# Run the reactor for this event, but only if validate() passed.
+# Events belonging to a grouped definition (group_field set) are not
+# reacted to individually.  State transitions: reacting -> reacted,
+# or error.
+sub react {
+ my $self = shift;
+
+ if ($self->valid) {
+ if ($self->event->event_def->group_field) { # can't react individually to a grouped definition
+ $self->{reacted} = undef;
+ } else {
+ $self->update_state( 'reacting') || die 'Unable to update event state';
+ try {
+ $self->reacted(
+ OpenILS::Application::Trigger::ModRunner::Reactor
+ ->new( $self->event->event_def->reactor, $self->environment )
+ ->run
+ ->final_result
+ );
+ } otherwise {
+ $log->error( shift() );
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ };
+
+ if (defined $self->reacted) {
+ $self->update_state( 'reacted' ) || die 'Unable to update event state';
+ } else {
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ }
+ }
+ } else {
+ # validation failed or never ran; record "no reaction"
+ $self->{reacted} = undef;
+ }
+ return $self;
+}
+
+# Run the validator for this event after building its environment.
+# Idempotent: returns immediately if a validation result already
+# exists.  State transitions: validating -> valid/invalid, or error.
+sub validate {
+ my $self = shift;
+
+ return $self if (defined $self->valid);   # already validated
+
+ if ($self->build_environment->environment->{complete}) {
+ $self->update_state( 'validating') || die 'Unable to update event state';
+ try {
+ $self->valid(
+ OpenILS::Application::Trigger::ModRunner::Validator
+ ->new( $self->event->event_def->validator, $self->environment )
+ ->run
+ ->final_result
+ );
+ } otherwise {
+ $log->error( shift() );
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ };
+
+ if (defined $self->valid) {
+ if ($self->valid) {
+ $self->update_state( 'valid' ) || die 'Unable to update event state';
+ } else {
+ $self->update_state( 'invalid' ) || die 'Unable to update event state';
+ }
+ } else {
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ }
+ } else {
+ # environment collection failed; leave validation undecided
+ $self->{valid} = undef
+ }
+
+ return $self;
+}
+
+# --- Combined getter/setter accessors -------------------------------
+# Each returns undef when called as a class method, sets the slot only
+# when passed a defined value, and always returns the current value.
+
+# Result of the cleanup handler run.
+sub cleanedup {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $c = shift;
+ $self->{cleanedup} = $c if (defined $c);
+ return $self->{cleanedup};
+}
+
+# Result of the reactor run.
+sub reacted {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $r = shift;
+ $self->{reacted} = $r if (defined $r);
+ return $self->{reacted};
+}
+
+# Result of the validator run.
+sub valid {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $v = shift;
+ $self->{valid} = $v if (defined $v);
+ return $self->{valid};
+}
+
+# The fleshed action_trigger.event (atev) object.
+sub event {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $e = shift;
+ $self->{event} = $e if (defined $e);
+ return $self->{event};
+}
+
+# Database id of the event row.
+sub id {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $i = shift;
+ $self->{id} = $i if (defined $i);
+ return $self->{id};
+}
+
+# Hashref handed to validators/reactors/cleanups (see build_environment).
+sub environment {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $e = shift;
+ $self->{environment} = $e if (defined $e);
+ return $self->{environment};
+}
+
+# CStoreEditor instance used for all retrievals and state updates.
+sub editor {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $e = shift;
+ $self->{editor} = $e if (defined $e);
+ return $self->{editor};
+}
+
+# The object the event's hook points at (hook core_type instance).
+sub target {
+ my $self = shift;
+ return undef unless (ref $self);
+
+ my $t = shift;
+ $self->{target} = $t if (defined $t);
+ return $self->{target};
+}
+
+# Persist a new state for this event: stamps update_time/update_process
+# and writes the given state inside its own transaction.  Returns the
+# commit result, or undef on any failure.
+# NOTE(review): if the retrieve/update fails mid-transaction there is
+# no explicit xact_rollback here -- confirm the editor cleans up.
+sub update_state {
+ my $self = shift;
+ return undef unless ($self && ref $self);
+
+ my $state = shift;
+ return undef unless ($state);
+
+ $self->editor->xact_begin || return undef;
+
+ # Re-fetch inside the transaction to avoid clobbering concurrent edits
+ my $e = $self->editor->retrieve_action_trigger_event( $self->id );
+ $e->update_time( 'now' );
+ $e->update_process( $$ );
+ $e->state( $state );
+ $self->editor->update_action_trigger_event( $e );
+
+ return $self->editor->xact_commit || undef;
+}
+
+# Populate $self->environment for the handler modules: the target
+# object, the event, the definition's template, any user parameters
+# (atevparam rows, values eval'd as Perl), and each configured
+# environment entry fleshed via _object_by_path().  Idempotent once
+# {complete} is set.  State: collecting -> collected, or error.
+sub build_environment {
+ my $self = shift;
+ return $self if ($self->environment->{complete});
+
+ $self->update_state( 'collecting') || die 'Unable to update event state';
+
+ try {
+
+ $self->environment->{target} = $self->target;
+ $self->environment->{event} = $self->event;
+ $self->environment->{template} = $self->event->event_def->template;
+
+ my @env_list = $self->editor->search_action_trigger_environment( { event_def => $self->event->event_def } );
+ my @param_list = $self->editor->search_action_trigger_params( { event_def => $self->event->event_def } );
+
+ $self->environment->{params}{ $_->param } = eval $_->value for ( @param_list );
+
+ for my $e ( @env_list ) {
+ my (@label, @path);
+ # Fix: split /\./, not split('.', ...) -- a plain '.' first
+ # argument is compiled as a regex matching ANY character,
+ # which always produced an empty list here.
+ @path = split(/\./, $e->path) if ($e->path);
+ @label = split(/\./, $e->label) if ($e->label);
+
+ $self->_object_by_path( $self->event->target, $e->collector, \@label, \@path );
+ }
+
+ $self->environment->{complete} = 1;
+ } otherwise {
+ $log->error( shift() );
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ };
+
+ if ($self->environment->{complete}) {
+ $self->update_state( 'collected' ) || die 'Unable to update event state';
+ } else {
+ $self->update_state( 'error' ) || die 'Unable to update event state';
+ }
+
+ return $self;
+}
+
+# Map an IDL class hint (e.g. "circ") to its full Fieldmapper class
+# name by scanning the published fieldmapper registry.  Returns the
+# first (only) class whose hint matches, or undef if none does.
+sub _fm_class_by_hint {
+ my $self = shift;
+ my $hint = shift;
+
+ my ($class) = grep {
+ Fieldmapper->publish_fieldmapper->{$_}->{hint} eq $hint
+ } keys %{ Fieldmapper->publish_fieldmapper };
+
+ return $class;
+}
+
+# Recursively walk one environment-entry path (e.g. "usr.card") from
+# $context, retrieving linked objects via CStoreEditor.  At the end of
+# the path, optionally transforms the object(s) through a Collector
+# module, then either stores the result in the environment under
+# @$label or grafts it onto $context at the link field.
+# Returns the object (or arrayref, for has_many links) found/built.
+sub _object_by_path {
+ my $self = shift;
+ my $context = shift;
+ my $collector = shift;
+ my $label = shift;
+ my $path = shift;
+
+ my $step = shift(@$path);
+
+ # Resolve the link metadata for this step from the IDL
+ my $fhint = Fieldmapper->publish_fieldmapper->{$context->class_name}{links}{$step}{class};
+ my $fclass = $self->_fm_class_by_hint( $fhint );
+
+ my $ffield = Fieldmapper->publish_fieldmapper->{$context->class_name}{links}{$step}{key};
+ my $rtype = Fieldmapper->publish_fieldmapper->{$context->class_name}{links}{$step}{reltype};
+
+ my $meth = 'retrieve_';
+ my $multi = 0;
+ my $lfield = $step;
+ if ($rtype eq 'has_many') {
+ # has_many: search the far side by our primary key, expect a list
+ $meth = 'search_';
+ $multi = 1;
+ $lfield = $context->Identity;
+ }
+
+ $meth .= $fclass;
+ $meth =~ s/Fieldmapper:://;
+ $meth =~ s/::/_/g;   # fix: /g, class names may nest more than one level
+
+ my $obj = $self->editor->$meth( { $ffield => $context->$lfield() } );
+
+ if (@$path) {
+
+ # More steps remain: normalize to a list and recurse into each
+ my $obj_list = [];
+ if (!$multi) {
+ $obj_list = [$obj] if ($obj);
+ } else {
+ $obj_list = $obj;
+ }
+
+ $self->_object_by_path( $_, $collector, $label, $path ) for (@$obj_list);
+
+ $obj = $$obj_list[0] if (!$multi);
+ $context->$step( $obj ) if ($obj && !$label);
+
+ } else {
+
+ if ($collector) {
+ # Fix: "my $x = ... if COND" is undefined behavior in Perl;
+ # declare first, then assign conditionally.
+ my $obj_list = [];
+ $obj_list = [$obj] if ($obj && !$multi);
+ $obj_list = $obj if ($multi);
+
+ # Run each object through the Collector module
+ my @new_obj_list;
+ for my $o ( @$obj_list ) {
+ push @new_obj_list,
+ OpenILS::Application::Trigger::ModRunner::Collector
+ ->new( $collector, $o )
+ ->run
+ ->final_result
+ }
+
+ if (!$multi) {
+ $obj = $new_obj_list[0];
+ } else {
+ $obj = \@new_obj_list;
+ }
+ }
+
+ if ($label) {
+ # Store under the (possibly nested) label in the environment
+ my $node = $self->environment;
+ my $i = 0; my $max = scalar(@$label) - 1;
+ for (; $i < $max; $i++) {
+ my $part = $$label[$i];
+ $$node{$part} ||= {};
+ $node = $$node{$part};
+ }
+ $$node{$$label[-1]} = $obj;
+ } else {
+ # No label: flesh the link field on the context object itself
+ $context->$step( $obj ) if ($obj);
+ }
+ }
+
+ return $obj;
+}
+
+1;
package OpenILS::Application::Trigger::ModLoader;
use UNIVERSAL::require;
+sub prefix { return 'OpenILS::Application::Trigger' }
+
sub new {
my $class = shift;
$class = ref($class) || $class;
- my $mod_thing = shift;
- return undef unless ($mod_thing);
+ my $mod = shift;
+ return undef unless ($mod);
my $self = bless {
- mod_thing => $mod_thing,
- module => $mod_thing->module(),
+ module => ref $mod ? $mod->module() : $mod,
handler => 'handler'
} => $class;
my $loaded = $m->use;
if (!$loaded) {
- $builtin_m = "OpenILS::Application::Trigger::$m";
+ $builtin_m = $self->prefix . "::$m";
$loaded = $builtin_m->use;
if (!$loaded) {
if (!$loaded) {
$h = $self->handler;
- my $builtin_m = "OpenILS::Application::Trigger::$m";
+ $builtin_m = $self->prefix . "::$m";
$loaded = $m->use;
$m = $builtin_m if ($loaded);
}
} else {
$loaded = $m->use;
+
+ # The following is an escape hatch for builtin dummy handlers
+ if (!$loaded) {
+ $loaded = $self->prefix->use;
+ if ($loaded && $self->prefix->can( $self->module ) ) {
+ $m = $self->prefix;
+ $h = $self->module;
+ }
+ }
}
} else {
$m = $builtin_m;
return $self;
};
+# Thin ModRunner subclasses: each overrides only prefix() to point the
+# module-resolution logic (and its builtin-handler escape hatch) at the
+# matching handler namespace.
+package OpenILS::Application::Trigger::ModRunner::Collector;
+use base 'OpenILS::Application::Trigger::ModRunner';
+sub prefix { return 'OpenILS::Application::Trigger::Collector' }
+
+package OpenILS::Application::Trigger::ModRunner::Validator;
+use base 'OpenILS::Application::Trigger::ModRunner';
+sub prefix { return 'OpenILS::Application::Trigger::Validator' }
+
+package OpenILS::Application::Trigger::ModRunner::Reactor;
+use base 'OpenILS::Application::Trigger::ModRunner';
+sub prefix { return 'OpenILS::Application::Trigger::Reactor' }
+
+package OpenILS::Application::Trigger::ModRunner::Cleanup;
+use base 'OpenILS::Application::Trigger::ModRunner';
+sub prefix { return 'OpenILS::Application::Trigger::Cleanup' }
+
+
package OpenILS::Application::Trigger::ModStackRunner;
use base 'OpenILS::Application::Trigger::ModRunner';
--- /dev/null
+# Built-in reactor handlers for Action/Trigger event definitions.
+# Resolved via ModRunner's builtin-handler escape hatch; names match
+# the seed data in action_trigger.reactor.
+package OpenILS::Application::Trigger::Reactor;
+sub fourty_two { return 42 }   # test handler: the answer to everything
+sub NOOP_True { return 1 }     # reaction that always passes
+sub NOOP_False { return 0 }    # reaction that always fails
+1;
--- /dev/null
+# Built-in validator handlers for Action/Trigger event definitions.
+# Each receives ($self, $env) where $env is the environment hashref
+# built by Trigger::Event (with {target} set); returns 1 (valid) or
+# 0 (invalid).  Names match the seed data in action_trigger.validator.
+package OpenILS::Application::Trigger::Validator;
+sub fourty_two { return 42 }   # test handler: the answer to everything
+sub NOOP_True { return 1 }     # validation always passes
+sub NOOP_False { return 0 }    # validation always fails
+
+# Valid while the target circulation has not been checked in.
+sub CircIsOpen {
+ my $self = shift;
+ my $env = shift;
+
+ return defined($env->{target}->checkin_time) ? 0 : 1;
+}
+
+# Valid when the target hold has been captured -- and, if a transit
+# exists, when that transit has been received at its destination.
+sub HoldIsAvailable {
+ my $self = shift;
+ my $env = shift;
+
+ my $t = $env->{target}->transit;
+
+ # An unfleshed transit is just an id; we need the object itself
+ die "Transit object exists, but is not fleshed. Add 'transit' to the environment in order to use this Validator."
+ if ($t && !ref($t));
+
+ if ($t) {
+ return (defined($env->{target}->capture_time) && defined($t->dest_recv_time)) ? 1 : 0;
+ }
+
+ return defined($env->{target}->capture_time) ? 1 : 0;
+}
+
+1;
module TEXT PRIMARY KEY, -- All live under the OpenILS::Trigger::Collector:: namespace
description TEXT
);
-INSERT INTO action_trigger.collector (module,description) VALUES ('CircCountsByCircMod','Count of Circulations for a User, broken down by circulation modifier');
+INSERT INTO action_trigger.collector (module,description) VALUES ('fourty_two','Returns the answer to life, the universe and everything');
+--INSERT INTO action_trigger.collector (module,description) VALUES ('CircCountsByCircMod','Count of Circulations for a User, broken down by circulation modifier');
-- Simple tests on an FM object from hook.core_type to test for "should we still do this."
CREATE TABLE action_trigger.validator (
module TEXT PRIMARY KEY, -- All live under the OpenILS::Trigger::Validator:: namespace
description TEXT
);
+INSERT INTO action_trigger.validator (module,description) VALUES ('fourty_two','Returns the answer to life, the universe and everything');
+INSERT INTO action_trigger.validator (module,description) VALUES ('NOOP_True','Always returns true -- validation always passes');
+INSERT INTO action_trigger.validator (module,description) VALUES ('NOOP_False','Always returns false -- validation always fails');
INSERT INTO action_trigger.validator (module,description) VALUES ('CircIsOpen','Check that the circulation is still open');
INSERT INTO action_trigger.validator (module,description) VALUES ('HoldIsAvailable','Check that an item is on the hold shelf');
module TEXT PRIMARY KEY, -- All live under the OpenILS::Trigger::Reactor:: namespace
description TEXT
);
+INSERT INTO action_trigger.reactor (module,description) VALUES ('fourty_two','Returns the answer to life, the universe and everything');
+INSERT INTO action_trigger.reactor (module,description) VALUES ('NOOP_True','Always returns true -- reaction always passes');
+INSERT INTO action_trigger.reactor (module,description) VALUES ('NOOP_False','Always returns false -- reaction always fails');
INSERT INTO action_trigger.reactor (module,description) VALUES ('SendEmail','Send an email based on a user-defined template');
INSERT INTO action_trigger.reactor (module,description) VALUES ('GenerateBatchOverduePDF','Output a batch PDF of overdue notices for printing');
module TEXT PRIMARY KEY, -- All live under the OpenILS::Trigger::Cleanup:: namespace
description TEXT
);
+INSERT INTO action_trigger.cleanup (module,description) VALUES ('fourty_two','Returns the answer to life, the universe and everything');
+INSERT INTO action_trigger.cleanup (module,description) VALUES ('NOOP_True','Always returns true -- cleanup always passes');
+INSERT INTO action_trigger.cleanup (module,description) VALUES ('NOOP_False','Always returns false -- cleanup always fails');
INSERT INTO action_trigger.cleanup (module,description) VALUES ('ClearAllPending','Remove all future, pending notifications for this target');
CREATE TABLE action_trigger.event_definition (
update_time TIMESTAMPTZ,
complete_time TIMESTAMPTZ,
update_process INT,
- state TEXT NOT NULL DEFAULT 'pending' CHECK (state IN ('pending','found','collecting','validating','reacting','cleanup','complete','error')),
+ state TEXT NOT NULL DEFAULT 'pending' CHECK (state IN ('pending','invalid','found','collecting','collected','validating','valid','reacting','reacted','cleaning','complete','error')),
template_output TEXT,
error_output TEXT
);
| lul_fre_100.marc | MARC21 | MARC8 | Unicorn GL3.1 | 100 records, French, pre-1923 |
| lul_fre_500.marc | MARC21 | MARC8 | Unicorn GL3.1 | 500 records, French, pre-1923 |
| jazz_1k.marc | MARC21 | MARC8 | Unicorn GL3.1 | 1000 records |
+| map_data.marc | MARC21 | UTF8 | Voyager (LoC) | 3 records with some geospatial metadata |
| music_5k.marc | MARC21 | MARC8 | Unicorn GL3.1 | 5000 records |
| hebrew.marc | MARC21 | MARC8 | III | Hebrew scripts, 25 records |
--- /dev/null
+03311cem 2200529 a 4500001000900000005001700009007000900026008003900035906004500074955003200119010001500151034005400166040001300220050002600233052001300259110004800272245013700320255008900457260014900546300010600695440007100801500004400872500003100916500007500947500006901022500020801091500007401299504001801373500005601391505041501447650005301862650006201915650006301977650004102040650003002081650003902111700001802150700002402168700003302192710002902225740006402254740010802318740009202426740010902518901003002627852012402657\1e13683783\1e20090125221700.0\1eaj canzn\1e040811s1989 cauae bh b s 0 eng\1e \1fa7\1fbcbc\1fcorigcop\1fdu\1fencip\1ff20\1fgn-geogmaps\1e \1faga09 2004-08-16 sent to CMT\1e \1fa2004631763\1e1 \1faa\1fb250000\1fdW1260000\1feW1240000\1ffN0420000\1fgN0400000\1e \1faDLC\1fcDLC\1e00\1faG4362.C6C5 s250\1fb.C37\1e \1fa4362\1fbC6\1e1 \1faCalifornia.\1fbDivision of Mines and Geology.\1e10\1faGeology of the northern California continental margin /\1fcH. Gary Greene and Michael P. Kennedy, editors ; [graphics by Ross Martin].\1e \1faScale 1:250,000 ;\1fbtransverse Mercator proj.\1fc(W 126⁰--W 124⁰/N 42⁰--N 40⁰).\1e \1fa[Sacramento, Calif.] :\1fbState of California, Resources Agency, Dept. of Conservation ;\1fa[Reston, Va.] :\1fbUnited States Geological Survey,\1fc1989.\1e \1fa4 maps :\1fbcol. ;\1fceach 91 x 69 cm., on sheets 116 x 99 cm. or smaller, folded in envelope 31 x 26 cm.\1e 0\1faCalifornia continental margin geologic map series ;\1fvmap no. 7A-7D\1e \1faDepths shown by contours and soundings.\1e \1faTitle from envelope cover.\1e \1fa"Northern California continental margin--Area 7 of 7"--Envelope cover.\1e \1faMaps attributed to various authors, compilers, and contributors.\1e \1faThe California continental margin geologic map series was developed cooperatively by the State of California Dept. 
of Conservation, Division of Mines and Geology, and the United States Geological Survey.\1e \1faSome sheets include chart of geophysical tracklines and data sources.\1e \1faBibliography.\1e \1faIncludes geologic explanation and location diagram.\1e0 \1faMap no. 7A. Geologic map of the northern California continental margin -- Map no. 7B. Earthquake epicenters and selected fault plane solutions of the northern California continental margin -- Map no. 7C. Bouguer gravity and magnetic anomaly map of the northern California continental margin -- Map no. 7D. Well location, geophysical trackline, and data source map of the northern California continental margin.\1e 0\1faGeology\1fzCalifornia\1fzPacific Coast Region\1fvMaps.\1e 0\1faFaults (Geology)\1fzCalifornia\1fzPacific Coast Region\1fvMaps.\1e 0\1faGravity anomalies\1fzCalifornia\1fzPacific Coast Region\1fvMaps.\1e 0\1faContinental shelf\1fzCalifornia\1fvMaps.\1e 0\1faCoasts\1fzCalifornia\1fvMaps.\1e 0\1faSubmerged lands\1fzCalifornia\1fvMaps.\1e1 \1faGreene, H. 
G.\1e1 \1faKennedy, Michael P.\1e1 \1faMartin, Ross,\1fccartographer.\1e2 \1faGeological Survey (U.S.)\1e02\1faGeologic map of the northern California continental margin.\1e02\1faEarthquake epicenters and selected fault plane solutions of the northern California continental margin.\1e02\1faBouguer gravity and magnetic anomaly map of the northern California continental margin.\1e02\1faWell location, geophysical trackline, and data source map of the northern California continental margin.\1e \1fa13683783\1fbSystem Local\1fc1\1e4 \1fagaaagpl\1fbBR1\1fbBR1\1fcStacks\1fjG4362.C6C5 s250.C37\1fp11223344\1fy0.00\1fxnonreference\1fxholdable\1fxcirculating\1fxvisible\1fzAvailable\1e\1d01918cem 22003854a 4500001000900000005001700009007000900026008003900035906004500074955003200119010001500151020001500166034005300181040001300234050002400247052001800271110002900289245016000318250004600478255011000524260004500634300003300679500005700712500004400769500012000813500008400933500009601017651004901113651005601162650004801218700007801266700003601344901003001380852012201410\1e14933689\1e20090127030616.0\1eaj canzn\1e070719s2007 vauabg cp a f 0 eng\1e \1fa7\1fbcbc\1fcorigcop\1fdu\1fencip\1ff20\1fgy-geogmaps\1e \1faga09 2007-07-19 sent to CMT\1e \1fa2007631309\1e \1fa1411318013\1e1 \1faa\1fb24000\1fdW1194500\1feW1192910\1ffN0374705\1fgN0374200\1e \1faDLC\1fcDLC\1e00\1faG4362.Y62 2007\1fb.G4\1e \1fa4362\1fbY62\1fbY6\1e2 \1faGeological Survey (U.S.)\1e10\1faMap of Yosemite Valley, Yosemite National Park and Wilderness, California, Mariposa County /\1fcUnited States, Department of the Interior, Geological Survey.\1e \1faShaded relief ed., limited revision 1970.\1e \1faScale 1:24,000 ;\1fbpolyconic proj.\1fc(W 119⁰45ʹ00ʺ--W 119⁰29ʹ10ʺ/N 37⁰47ʹ05ʺ--N 37⁰42ʹ00ʺ).\1e \1faReston, Va. :\1fbGeological Survey,\1fc2007.\1e \1fa1 map :\1fbcol. ;\1fc40 x 97 cm.\1e \1faRelief shown by contours, shading, and spot heights.\1e \1fa"Interior--Geological Survey ... 
1995."\1e \1faOn verso: The geologic story of Yosemite Valley / by N. king Huber, in the footsteps of François E. Matthes. 2007.\1e \1faIncludes descriptive list of "Elevations of principal points" and location map.\1e \1faMap of California showing location of Yosemite National Park and ill. (some col.) on verso.\1e 0\1faYosemite Valley (Calif.)\1fvMaps, Topographic.\1e 0\1faYosemite National Park (Calif.)\1fvMaps, Topographic.\1e 0\1faGeology\1fzCalifornia\1fzYosemite Valley\1fvMaps.\1e12\1faHuber, N. King\1fq(Norman King),\1fd1926-\1ftGeologic story of Yosemite Valley.\1e1 \1faMatthes, François,\1fd1874-1948.\1e \1fa14933689\1fbSystem Local\1fc3\1e4 \1fagaaagpl\1fbBR1\1fbBR1\1fcStacks\1fjG4362.Y62 2007.G4\1fp1090109\1fy0.00\1fxnonreference\1fxholdable\1fxcirculating\1fxvisible\1fzIn process\1e\1d03375cem 2200529 a 4500001000900000005001700009007000900026008003900035906004500074955003200119010001500151034005400166040001300220050002600233052001300259110004800272245015300320255008900473260014900562300010600711440007100817500004400888500003100932500008100963500006901044500020801113500007401321504001801395500005601413505043901469650005301908650006201961650006302023650004102086650003002127650003902157700001802196700002402214700002802238710002902266740007002295740011402365740009802479740011502577901003002692852012302722\1e13683740\1e20090126032558.0\1eaj canzn\1e040811s1987 cauae bh b s 0 eng\1e \1fa7\1fbcbc\1fcorigcop\1fdu\1fencip\1ff20\1fgn-geogmaps\1e \1faga09 2004-08-16 sent to CMT\1e \1fa2004631762\1e1 \1faa\1fb250000\1fdW1220000\1feW1180000\1ffN0340000\1fgN0320000\1e \1faDLC\1fcDLC\1e00\1faG4362.C6C5 s250\1fb.C33\1e \1fa4362\1fbC6\1e1 \1faCalifornia.\1fbDivision of Mines and Geology.\1e10\1faGeology of the outer-southern California continental margin /\1fcH. Gary Greene and Michael P. Kennedy, editors ; [graphics by James Dennis Williams].\1e \1faScale 1:250,000 ;\1fbtransverse Mercator proj.\1fc(W 122⁰--W 118⁰/N 34⁰--N 32⁰).\1e \1fa[Sacramento, Calif.] 
:\1fbState of California, Resources Agency, Dept. of Conservation ;\1fa[Reston, Va.] :\1fbUnited States Geological Survey,\1fc1987.\1e \1fa4 maps :\1fbcol. ;\1fceach 91 x 78 cm., on sheets 140 x 85 cm. or smaller, folded in envelope 31 x 26 cm.\1e 0\1faCalifornia continental margin geologic map series ;\1fvmap no. 3A-3D\1e \1faDepths shown by contours and soundings.\1e \1faTitle from envelope cover.\1e \1fa"Outer-southern California continental margin--Area 3 of 7"--Envelope cover.\1e \1faMaps attributed to various authors, compilers, and contributors.\1e \1faThe California continental margin geologic map series was developed cooperatively by the State of California Dept. of Conservation, Division of Mines and Geology, and the United States Geological Survey.\1e \1faSome sheets include chart of geophysical tracklines and data sources.\1e \1faBibliography.\1e \1faIncludes geologic explanation and location diagram.\1e0 \1faMap no. 3A. Geologic map of the outer-southern California continental margin -- Map no. 3B. Earthquake epicenters and selected fault plane solutions of the outer-southern California continental margin -- Map no. 3C. Bouguer gravity and magnetic anomaly map of the outer-southern California continental margin -- Map no. 3D. Well location, geophysical trackline, and data source map of the outer-southern California continental margin.\1e 0\1faGeology\1fzCalifornia\1fzPacific Coast Region\1fvMaps.\1e 0\1faFaults (Geology)\1fzCalifornia\1fzPacific Coast Region\1fvMaps.\1e 0\1faGravity anomalies\1fzCalifornia\1fzPacific Coast Region\1fvMaps.\1e 0\1faContinental shelf\1fzCalifornia\1fvMaps.\1e 0\1faCoasts\1fzCalifornia\1fvMaps.\1e 0\1faSubmerged lands\1fzCalifornia\1fvMaps.\1e1 \1faGreene, H. 
G.\1e1 \1faKennedy, Michael P.\1e1 \1faWilliams, James Dennis.\1e2 \1faGeological Survey (U.S.)\1e02\1faGeologic map of the outer-southern California continental margin.\1e02\1faEarthquake epicenters and selected fault plane solutions of the outer-southern California continental margin.\1e02\1faBouguer gravity and magnetic anomaly map of the outer-southern California continental margin.\1e02\1faWell location, geophysical trackline, and data source map of the outer-southern California continental margin.\1e \1fa13683740\1fbSystem Local\1fc2\1e4 \1fagaaagpl\1fbBR1\1fbBR1\1fcStacks\1fjG4362.C6C5 s250.C33\1fp2233444\1fy0.00\1fxnonreference\1fxholdable\1fxcirculating\1fxvisible\1fzAvailable\1e\1d
\ No newline at end of file
},
disconnect : function ( onerror ) {
+ // Mark the client disconnected and report success immediately.
+ this.connected = false;
+ return true;
+ // Workaround: session.disconnect() returns nothing (null), which is falsy, so the check below always treated a clean disconnect as a failure. The early return above makes everything past this point unreachable dead code.
if (!this.session.disconnect()) {
if (onerror) onerror(this.session);
return false;
}
- return true;
},
<script type="text/javascript" src="/js/dojo/dojo/dojo.js"></script>
<script type="text/javascript">
dojo.require('dojo.parser');
+ dojo.require('dojo.string');
function status_update (markup) {
dojo.byId('status_text').innerHTML = markup;
_debug("launching report output view at URL: " + url);
if(isXUL())
xulG.new_tab('/xul/server/util/rbrowser.xul?url=' + url, /* this comes from urls.XUL_REMOTE_BROWSER */
- {tab_name: dojo.string.substitute( rpt_strings.FOLDER_WINDOW_REPORT_OUTPUT, [r.name] ), browser:true},
+ {tab_name: dojo.string.substitute( rpt_strings.FOLDER_WINDOW_REPORT_OUTPUT, [r.name()] ), browser:true},
{no_xulG:false, show_nav_buttons:true, show_print_button:true});
else {
//goTo(url);