#!perl -w
#
# $Id: //websites/unixwiz/newroot/techtips/makescripts.txt#2 $
#
# written by :	Stephen J. Friedl
#		Software Consultant
#		Tustin, California
#		steve@unixwiz.net
#
# =========================================================
# This program is in the public domain and may be used in
# any way without restrictions.
# =========================================================
#
# This program is designed to help unload data from a Sybase Adaptive Server
# Anywhere 6 database by creating the SQL scripts that will extract the
# schemas and the data. These scripts are then loaded into the DBISQL tool
# and run interactively.
#
# Note that it's possible to do database unloads via the DBUNLOAD tool,
# but in our particular application we were unable to locate a database
# user that had permission to do this. So we resort to DBISQL.
#
# *This* program does no database access of any kind, and need not be run
# on the same system as the database. We simply read a text file of tables
# and produce a few SQL output files.
#
# For our documentation, we presume that we're unloading all our data on
# a Windows machine to C:\UNLOAD (plus subdirectories).
#
# GET LIST OF TABLES
# ------------------
#
# We start by fetching the list of all tables in the system, and this
# can be done with the "sp_tables" stored procedure that is a
# front end to a SELECT from SYSTABLE. From the interactive SQL
# window, we redirect the output to a file that we transfer back to
# our machine for processing with this perl script.
#
# Output redirection in DBISQL is done with >#, so the command
# we run is
#
#	sp_tables ># c:\unload\tables.txt
#
# which creates a TABLES.TXT file with comma-separated output:
#
#	'MYDATABASE','SYS','SYSARTICLECOL','SYSTEM TABLE',
#
#	 database,    creator, tablename,   tabtype
#
# This script was developed for unloading on a system with only a
# single database ("MYDATABASE"), so we don't do any disambiguation
# of the data that might be necessary in other applications.
#
# COMMAND LINE
# ------------
#
# This program normally takes several parameters, and they are long enough
# that it's probably wise to put them in a small batch file or shell
# script. We'll presume that you're doing just this (an example invocation
# is shown at the end of this header).
#
# The command-line parameters are:
#
# --help	Show this help listing to the standard error and exit.
#
# --verbose	Show a bit more debugging. This shows most of the input
#		parameters, plus each table that we ultimately do not
#		process due to the "--skip" parameters.
#
# --dir=DIR	The generated script files assume DIR is the top-level
#		place to store data on the target system; this need
#		not be the same as the current system, so the path is not
#		checked. The generated scripts will save their output to
#		the $DIR\data\ and $DIR\schemas\ subdirectories.
#
# --skip=ITEM	Ignore any table that has "ITEM" as any of its
#		parts. This could be the database name, the
#		creator, the table name, or the table type. For instance,
#		to get only "regular" tables and none of the system ones:
#
#		  --skip="VIEW" --skip="SYSTEM TABLE" --skip=dbo
#
#		This mechanism puts all these items in the same namespace,
#		so this falls apart if somebody (say) names a table
#		"VIEW" - sorry.
#
# --windows	TRUE if the generated script should use \\ in output paths
#		instead of /. We found after we added this option that the
#		NT version of Sybase works fine with UNIX-style forward
#		slashes, so this option is not necessary.
#
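# EXAMPLE INVOCATION
# ------------------
#
# A rough sketch of the small batch file mentioned above; the script
# name "makescripts.pl" and the table-list location "c:\unload\tables.txt"
# are only illustrative names, not anything this program requires:
#
#	perl makescripts.pl --verbose --dir=C:/UNLOAD ^
#		--skip="VIEW" --skip="SYSTEM TABLE" --skip=dbo ^
#		c:\unload\tables.txt
#
# This reads the table list produced by sp_tables and writes the two
# driver scripts (queryschema.sql and querydata.sql) into the current
# directory, ready to be loaded into DBISQL.
#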
use strict;

my %SKIP         = ();		# items we don't care about

$0 =~ s|.*[/\\]||;		# dump path part of program name

my $TABLEFILE    = undef;	# input file with table data
my $WORKDIR      = undef;	# scripts generate output to $WORKDIR
my $verbose      = 0;		# show a bit more detail
my $windows      = 0;		# target machine is Windows

my $schemascript = "queryschema.sql";
my $datascript   = "querydata.sql";

foreach ( @ARGV )
{
	if ( m/^--help/i )		# --help
	{
		print STDERR <<"EOF";
usage: $0 [options] tablefile

	--help		show this help listing to standard error and exit
	--verbose	show a bit more debugging output
	--dir=DIR	top-level directory for unloaded data on the target
	--skip=ITEM	ignore any table with ITEM as any of its parts
	--windows	use \\\\ instead of / in generated output paths
EOF
		exit 0;
	}
	elsif ( m/^--verbose/i )	# --verbose
	{
		$verbose++;
	}
	elsif ( m/^--windows/i )	# --windows
	{
		$windows++;
	}
	elsif ( m/^--dir=(.+)/i )	# --dir=DIR
	{
		$WORKDIR = $1;
	}
	elsif ( m/^--skip=(.+)/i )	# --skip=ITEM
	{
		$SKIP{uc $1}++;
	}
	elsif ( m/^--/ )
	{
		die "ERROR: \"$_\" is not a valid command-line option\n";
	}
	else				# anything else is the table file
	{
		$TABLEFILE = $_;
	}
}

die "ERROR: no --dir=DIR given\n"	unless defined $WORKDIR;
die "ERROR: no table file given\n"	unless defined $TABLEFILE;

# The generated scripts drop their output into these two subdirectories
# of the work directory on the target system.
my $schemadir = "$WORKDIR/schemas";
my $datadir   = "$WORKDIR/data";

if ( $verbose )
{
	print STDERR "work dir   = $WORKDIR\n";
	print STDERR "table file = $TABLEFILE\n";
}

open(SCHEMAS, ">$schemascript") or die "ERROR: cannot create $schemascript";
open(DATA,    ">$datascript")   or die "ERROR: cannot create $datascript";

my $count = 0;

open(F, $TABLEFILE) or die "ERROR: cannot open table file {$TABLEFILE}\n";

while ( <F> )
{
	#----------------------------------------------------------------
	# First split the input line (by commas) into individual parts,
	# and we dump the quotes because the commas give us sufficient
	# splittage.
	#
	# Then see if any of these parts are worth ignoring.
	#
	s/\s+$//;		# dump trailing whitespace
	s/'//g;			# dump single quotes

	my($dbname, $creator, $table, $type) = split( m/,/ );

	if ( $SKIP{uc $dbname} or $SKIP{uc $creator}
	  or $SKIP{uc $table}  or $SKIP{uc $type} )
	{
		print STDERR "Skipping $dbname.$creator.$table ($type)\n"
			if $verbose;
		next;
	}

	$count++;

	# ----------------------------------------------------------------
	# CREATE SCHEMA REQUEST
	#
	# The schemas are queried with the "sp_columns" stored procedure,
	# and we provide the table and creator names to be sure that we
	# don't have clashes from multiple tables having the same name
	# under different creators.
	#
	my $schemafile = fixpath( "$schemadir/${creator}_$table.sql" );

	print SCHEMAS "sp_columns $table,$creator ># $schemafile;\n";

	# ----------------------------------------------------------------
	# CREATE DATA REQUEST
	#
	# For unloading the data we use a simple UNLOAD script to the
	# output file.
	#
	my $datafile = fixpath( "$datadir/${creator}_$table.txt" );

	print DATA "UNLOAD TABLE $creator.$table TO '$datafile';\n";
}

close SCHEMAS;
close DATA;

print "Wrote $count requests to script files\n";

#
# fixpath
#
#	Given a path in UNIX format (forward slashes), convert it
#	to the proper Windows path notation with doubled backslashes
#	if required.
#
sub fixpath {
	my $path = shift;

	$path =~ s|/|\\\\|g if $windows;

	return $path;
}
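
# -----------------------------------------------------------------------
# SAMPLE GENERATED OUTPUT
#
# Just as a sketch: given a hypothetical table-list line such as
#
#	'MYDATABASE','DBA','CUSTOMER','TABLE',
#
# and a run with --dir=C:/UNLOAD (no --windows), each driver script
# picks up one line for that table:
#
#	queryschema.sql:  sp_columns CUSTOMER,DBA ># C:/UNLOAD/schemas/DBA_CUSTOMER.sql;
#	querydata.sql:    UNLOAD TABLE DBA.CUSTOMER TO 'C:/UNLOAD/data/DBA_CUSTOMER.txt';
#
# Both scripts are then loaded into DBISQL and run against the live
# database to produce the actual schema and data files.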