Diffstat (limited to 'bin')
-rwxr-xr-x | bin/add-history-records.pl | 139
-rw-r--r-- | bin/backup-dvd             |  45
-rw-r--r-- | bin/billco-upload          |  20
-rwxr-xr-x | bin/bind.export            |  20
-rwxr-xr-x | bin/bind.import            | 194
-rwxr-xr-x | bin/dbdef-create           |   2
-rwxr-xr-x | bin/fix-sequences          |   2
-rwxr-xr-x | bin/generate-raddb         |  14
-rwxr-xr-x | bin/generate-table-module  |  89
-rwxr-xr-x | bin/masonize               |   2
-rw-r--r-- | bin/pg-readonly            |  24
-rwxr-xr-x | bin/postfix.export         |   2
-rwxr-xr-x | bin/print-schema           |   7
-rwxr-xr-x | bin/rate.import            |  28
-rwxr-xr-x | bin/rt-drop-tables         |  29
-rwxr-xr-x | bin/slony-setup            |  53
-rwxr-xr-x | bin/svc_broadband.renumber |  84
17 files changed, 112 insertions, 642 deletions
diff --git a/bin/add-history-records.pl b/bin/add-history-records.pl
deleted file mode 100755
index fbf9d09d9..000000000
--- a/bin/add-history-records.pl
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/perl
-
-die "This is broken. Don't use it!\n";
-
-use strict;
-use FS::UID qw(adminsuidsetup);
-use FS::Record qw(qsearchs qsearch);
-
-use Data::Dumper;
-
-my @tables = qw(svc_acct svc_broadband svc_domain svc_external svc_forward svc_www cust_svc domain_record);
-#my @tables = qw(svc_www);
-
-my $user = shift or die &usage;
-my $dbh = adminsuidsetup($user);
-
-my $dbdef = FS::Record::dbdef;
-
-foreach my $table (@tables) {
-
-  my $h_table = 'h_' . $table;
-  my $cnt = 0;
-  my $t_cnt = 0;
-
-  eval "use FS::${table}";
-  die $@ if $@;
-  eval "use FS::${h_table}";
-  die $@ if $@;
-
-  print "Adding history records for ${table}...\n";
-
-  my $dbdef_table = $dbdef->table($table);
-  my $pkey = $dbdef_table->primary_key;
-
-  foreach my $rec (qsearch($table, {})) {
-
-    #my $h_rec = qsearchs(
-    #  $h_table,
-    #  { $pkey => $rec->getfield($pkey) },
-    #  eval "FS::${h_table}->sql_h_searchs(time)",
-    #);
-
-    my $h_rec = qsearchs(
-      $h_table,
-      { $pkey => $rec->getfield($pkey) },
-      "DISTINCT ON ( $pkey ) *",
-      "AND history_action = 'insert' ORDER BY $pkey ASC, history_date DESC",
-      '',
-      'AS maintable',
-    );
-
-    unless ($h_rec) {
-      my $h_insert_rec = $rec->_h_statement('insert', 1);
-      #print $h_insert_rec . "\n";
-      $dbh->do($h_insert_rec);
-      die $dbh->errstr if $dbh->err;
-      $dbh->commit or die $dbh->errstr;
-      $cnt++;
-    }
-
-
-    $t_cnt++;
-
-  }
-
-  print "History records inserted into $h_table: $cnt\n";
-  print "          Total records in $table: $t_cnt\n";
-
-  print "\n";
-
-}
-
-foreach my $table (@tables) {
-
-  my $h_table = 'h_' . $table;
-  my $cnt = 0;
-
-  eval "use FS::${table}";
-  die $@ if $@;
-  eval "use FS::${h_table}";
-  die $@ if $@;
-
-  print "Adding insert records for unmatched delete records on ${table}...\n";
-
-  my $dbdef_table = $dbdef->table($table);
-  my $pkey = $dbdef_table->primary_key;
-
-  #SELECT * FROM h_svc_www
-  #DISTINCT ON ( $pkey ) ?
-  my $where = "
-    WHERE ${pkey} in (
-      SELECT ${h_table}1.${pkey}
-        FROM ${h_table} as ${h_table}1
-        WHERE (
-          SELECT count(${h_table}2.${pkey})
-            FROM ${h_table} as ${h_table}2
-            WHERE ${h_table}2.${pkey} = ${h_table}1.${pkey}
-              AND ${h_table}2.history_action = 'delete'
-        ) > 0
-        AND (
-          SELECT count(${h_table}3.${pkey})
-            FROM ${h_table} as ${h_table}3
-            WHERE ${h_table}3.${pkey} = ${h_table}1.${pkey}
-              AND ( ${h_table}3.history_action = 'insert'
-                    OR ${h_table}3.history_action = 'replace_new' )
-        ) = 0
-        GROUP BY ${h_table}1.${pkey})";
-
-
-  my @h_recs = qsearch(
-    $h_table, { },
-    "DISTINCT ON ( $pkey ) *",
-    $where,
-    '',
-    ''
-  );
-
-  foreach my $h_rec (@h_recs) {
-    #print "Adding insert record for deleted record with pkey='" . $h_rec->getfield($pkey) . "'...\n";
-    my $class = 'FS::' . $table;
-    my $rec = $class->new({ $h_rec->hash });
-    my $h_insert_rec = $rec->_h_statement('insert', 1);
-    #print $h_insert_rec . "\n";
-    $dbh->do($h_insert_rec);
-    die $dbh->errstr if $dbh->err;
-    $dbh->commit or die $dbh->errstr;
-    $cnt++;
-  }
-
-  print "History records inserted into $h_table: $cnt\n";
-
-}
-
-
-
-sub usage {
-  die "Usage:\n  add-history-records.pl user\n";
-}
-
diff --git a/bin/backup-dvd b/bin/backup-dvd
deleted file mode 100644
index d0314b469..000000000
--- a/bin/backup-dvd
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-
-database="freeside"
-DEVICE="/dev/hda"
-
-su freeside -c "pg_dump $database" >/var/backups/$database.sql
-
-DATE=$(date +%Y-%m-%d)
-
-#NOTE: These two paths must end in a / in
-#order to correctly build up the other paths
-#BACKUP_DIR="/backup/directory/"
-BACKUP_DIR="/backup/"
-  #TEMP_BACKUP_FILES_DIR="/backup/temp/"
-
-BACKUP_FILE=$BACKUP_DIR"backup-"$DATE".tar.bz2"
-  #DATABASE_FILE=$TEMP_BACKUP_FILES_DIR"foo-"$DATE".sql"
-
-  #These directories shouldn't end in a / although
-  #I don't think it will cause any problems if
-  #they do. There should be a space at the end though
-  #to ensure the database file gets concatenated correctly.
-  #SOURCE="/a/location /other/locations " $DATABASE_FILE
-
-#echo Removing old backup directories
-rm -rf $BACKUP_DIR
-  #rm -rf $TEMP_BACKUP_FILES_DIR
-
-#echo Creating new backup directories
-mkdir $BACKUP_DIR
-  #mkdir $TEMP_BACKUP_FILES_DIR
-
-  #echo Creating database backup
-  #pg_dump -U username -f $DATABASE_FILE databaseName
-
-#echo Backing up $SOURCE to file $BACKUP_FILE
-#tar -cvpl -f $BACKUP_FILE --anchored --exclude /backup /
-tar -cjpl -f $BACKUP_FILE --anchored --exclude /backup /
-
-  ##This is not necessary and possibly harmful for DVD+RW media
-  #echo Quick blanking media
-  #dvd+rw-format -blank /dev/hdc
-
-#echo Burning backup
-growisofs -dvd-compat -Z $DEVICE -quiet -r -J $BACKUP_FILE
diff --git a/bin/billco-upload b/bin/billco-upload
deleted file mode 100644
index ce4a43d5f..000000000
--- a/bin/billco-upload
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-
-AGENTNUMS="1 2 3"
-
-date=`date +"%Y%m%d"`
-dir="/usr/local/etc/freeside/export.DBI:Pg:dbname=freeside/cust_bill"
-cd "$dir"
-
-for AGENTNUM in $AGENTNUMS; do
-
-  for a in header detail; do
-    mv agentnum$AGENTNUM-$a.csv agentnum$AGENTNUM-$date-$a.csv
-  done
-
-  zip agentnum$AGENTNUM-$date.zip agentnum$AGENTNUM-$date-header.csv agentnum$AGENTNUM-$date-detail.csv
-
-  echo $dir/agentnum$AGENTNUM-$date.zip
-
-done
-
diff --git a/bin/bind.export b/bin/bind.export
index 286e43a2d..d0b93797e 100755
--- a/bin/bind.export
+++ b/bin/bind.export
@@ -43,12 +43,10 @@ foreach my $export ( @exports ) {
   open(NAMED_CONF,">$prefix/named.conf")
     or die "can't open $prefix/named.conf: $!";
 
-  if ( -e "$prefix/named.conf.HEADER" ) {
-    open(CONF_HEADER,"<$prefix/named.conf.HEADER")
-      or die "can't open $prefix/named.conf.HEADER: $!";
-    while (<CONF_HEADER>) { print NAMED_CONF $_; }
-    close CONF_HEADER;
-  }
+  open(CONF_HEADER,"<$prefix/named.conf.HEADER")
+    or die "can't open $prefix/named.conf.HEADER: $!";
+  while (<CONF_HEADER>) { print NAMED_CONF $_; }
+  close CONF_HEADER;
 
   my $zonepath = $export->option('zonepath');
   $zonepath =~ s/\/$//;
@@ -147,12 +145,10 @@ foreach my $sexport ( @sexports ) { #false laziness with above
   open(NAMED_CONF,">$prefix/named.conf")
     or die "can't open $prefix/named.conf: $!";
 
-  if ( -e "$prefix/named.conf.HEADER" ) {
-    open(CONF_HEADER,"<$prefix/named.conf.HEADER")
-      or die "can't open $prefix/named.conf.HEADER: $!";
-    while (<CONF_HEADER>) { print NAMED_CONF $_; }
-    close CONF_HEADER;
-  }
+  open(CONF_HEADER,"<$prefix/named.conf.HEADER")
+    or die "can't open $prefix/named.conf.HEADER: $!";
+  while (<CONF_HEADER>) { print NAMED_CONF $_; }
+  close CONF_HEADER;
 
   my $masters = $sexport->option('master');
 
diff --git a/bin/bind.import b/bin/bind.import
index 1cdf5672c..41313fba6 100755
--- a/bin/bind.import
+++ b/bin/bind.import
@@ -1,33 +1,22 @@
 #!/usr/bin/perl -w
 #
-# REQUIRED:
-# -p: part number for domains
-#
-# -n: named.conf file (or an include file with zones you want to import),
-#     for example root@ns.isp.com:/var/named/named.conf
-#
-# OPTIONAL:
-# -d: dry-run, debug: don't insert any records, just dump debugging output
 # -s: import slave zones as master.  useful if you need to recreate your
 #     primary nameserver from a secondary
-# -c dir: override patch for downloading zone files (for example, when
-#         downloading zone files from chrooted bind)
+# -c chroot_dir: import data from chrooted bind (corrects the path for
+#                downloading zone files
 #
 # need to manually put header in
 # /usr/local/etc/freeside/export.<datasrc./bind/<machine>/named.conf.HEADER
-# (or, nowadays, better just to include the file freeside exports)
 
 use strict;
-
-use vars qw($domain_svcpart);
-
+use vars qw( %d_part_svc );
 use Getopt::Std;
-use Data::Dumper;
+use Term::Query qw(query);
 #use BIND::Conf_Parser;
 #use DNS::ZoneParse 0.81;
-use Net::SCP qw(scp iscp);
-
+#use Net::SCP qw(iscp);
+use Net::SCP qw(scp);
 use FS::UID qw(adminsuidsetup datasrc);
 use FS::Record qw(qsearch); #qsearchs);
 #use FS::svc_acct_sm;
@@ -36,8 +25,8 @@ use FS::domain_record;
 #use FS::svc_acct;
 #use FS::part_svc;
 
-use vars qw($opt_p $opt_n $opt_s $opt_c $opt_d);
-getopts("p:n:sc:d");
+use vars qw($opt_s $opt_c);
+getopts("sc:");
 
 my $user = shift or die &usage;
 adminsuidsetup $user;
@@ -49,19 +38,42 @@ use vars qw($spooldir);
 $spooldir = "/usr/local/etc/freeside/export.". datasrc. "/bind";
 mkdir $spooldir unless -d $spooldir;
 
-$domain_svcpart = $opt_p;
-
-my $named_conf = $opt_n;
+%d_part_svc =
+  map { $_->svcpart, $_ } qsearch('part_svc',{'svcdb'=>'svc_domain'});
+print "\n\n",
+      ( join "\n", map "$_: ".$d_part_svc{$_}->svc, sort keys %d_part_svc ),
+      "\n\n";
+use vars qw($domain_svcpart);
+$^W=0; #Term::Query isn't -w-safe
+$domain_svcpart =
+  query "Enter part number for domains: ", 'irk', [ keys %d_part_svc ];
+$^W=1;
+
+print "\n\n", <<END;
+Enter the location and name of your primary named.conf file, for example
+"ns.isp.com:/var/named/named.conf"
+END
+my($named_conf)=&getvalue(":");
+
 use vars qw($named_machine $prefix);
 $named_machine = (split(/:/, $named_conf))[0];
-my $pnamed_machine = $named_machine;
-$pnamed_machine =~ s/^[\w\-]+\@//;
-$prefix = "$spooldir/$pnamed_machine";
+$prefix = "$spooldir/$named_machine";
 mkdir $prefix unless -d $prefix;
 
-#iscp("$named_conf","$prefix/named.conf.import");
-scp("$named_conf","$prefix/named.conf.import");
+#iscp("root\@$named_conf","$prefix/named.conf.import");
+scp("root\@$named_conf","$prefix/named.conf.import");
+
+
+sub getvalue {
+  my $prompt = shift;
+  $^W=0; # Term::Query isn't -w-safe
+  my $return = query $prompt, '';
+  $^W=1;
+  $return;
+}
+
+print "\n\n";
 
 ##
@@ -75,7 +87,7 @@ print "\nBIND import completed.\n";
 ##
 
 sub usage {
-  die "Usage:\n\n  bind.import -p partnum -n \"user\@machine:/path/to/named.conf\" [ -s ] [ -c chroot_dir ] [ -f ] user\n";
+  die "Usage:\n\n  bind.import user\n";
 }
 
 ########
@@ -85,13 +97,11 @@ BEGIN {
   use BIND::Conf_Parser;
   use vars qw(@ISA $named_dir);
   @ISA = qw(BIND::Conf_Parser);
-
-  $named_dir = 'COULD_NOT_FIND_NAMED_DIRECTORY_TRY_SETTING_-C_OPTION';
+
   sub handle_option {
     my($self, $option, $argument) = @_;
     return unless $option eq "directory";
     $named_dir = $argument;
-    #warn "found named dir: $named_dir\n";
   }
 
   sub handle_zone {
@@ -120,25 +130,20 @@ BEGIN {
 
     if ( $type eq 'slave' && !$main::opt_s ) {
 
-      if ( $main::opt_d ) {
-
-        use Data::Dumper;
-        print "$name: ". Dumper($options);
-
-      } else {
-
-        foreach my $master ( @{ $options->{masters} } ) {
-          my $domain_record = new FS::domain_record( {
-            'svcnum' => $domain->svcnum,
-            'reczone' => '@',
-            'recaf' => 'IN',
-            'rectype' => '_mstr',
-            'recdata' => $master,
-          } );
-          my $error = $domain_record->insert;
-          die $error if $error;
-        }
-
+      #use Data::Dumper;
+      #print Dumper($options);
+      #exit;
+
+      foreach my $master ( @{ $options->{masters} } ) {
+        my $domain_record = new FS::domain_record( {
+          'svcnum' => $domain->svcnum,
+          'reczone' => '@',
+          'recaf' => 'IN',
+          'rectype' => '_mstr',
+          'recdata' => $master,
+        } );
+        my $error = $domain_record->insert;
+        die $error if $error;
       }
 
     } elsif ( $type eq 'master' || ( $type eq 'slave' && $main::opt_s ) ) {
@@ -148,79 +153,54 @@ BEGIN {
     use File::Basename;
     my $basefile = basename($file);
     my $sourcefile = $file;
-    if ( $main::opt_c ) {
-      $sourcefile = "$main::opt_c/$sourcefile" if $main::opt_c;
-    } else {
-      $sourcefile = "$named_dir/$sourcefile" unless $file =~ /^\//;
-    }
+    $sourcefile = "$named_dir/$sourcefile" unless $file =~ /^\//;
+    $sourcefile = "$main::opt_c/$sourcefile" if $main::opt_c;
 
     use Net::SCP qw(iscp scp);
-    #iscp("$main::named_machine:$sourcefile",
-    #     "$main::prefix/$basefile.import");
-    scp("$main::named_machine:$sourcefile",
-        "$main::prefix/$basefile.import");
+    scp("root\@$main::named_machine:$sourcefile",
+        "$main::prefix/$basefile.import");
 
     use DNS::ZoneParse 0.84;
     my $zone = DNS::ZoneParse->new("$main::prefix/$basefile.import");
     my $dump = $zone->dump;
-
-    if ( $main::opt_d ) {
-
-      use Data::Dumper;
-      print "$name: ". Dumper($dump);
-
-    } else {
+
+    #use Data::Dumper;
+    #print "$name: ". Dumper($dump);
+    #exit;
 
-      foreach my $rectype ( keys %$dump ) {
-        if ( $rectype =~ /^SOA$/i ) {
-          my $rec = $dump->{$rectype};
-          $rec->{email} =~ s/\@/\./;
+    foreach my $rectype ( keys %$dump ) {
+      if ( $rectype =~ /^SOA$/i ) {
+        my $rec = $dump->{$rectype};
+        my $domain_record = new FS::domain_record( {
+          'svcnum' => $domain->svcnum,
+          'reczone' => $rec->{origin},
+          'recaf' => 'IN',
+          'rectype' => $rectype,
+          'recdata' =>
+            $rec->{primary}. ' '. $rec->{email}. ' ( '.
+            join(' ', map $rec->{$_},
+                 qw( serial refresh retry expire minimumTTL ) ).
+            ' )',
+        } );
+        my $error = $domain_record->insert;
+        die $error if $error;
+      } else {
+        #die $dump->{$rectype};
+        foreach my $rec ( @{ $dump->{$rectype} } ) {
           my $domain_record = new FS::domain_record( {
            'svcnum' => $domain->svcnum,
-            'reczone' => $rec->{origin},
-            'recaf' => 'IN',
+            'reczone' => $rec->{name},
+            'recaf' => $rec->{class},
            'rectype' => $rectype,
-            'recdata' =>
-              $rec->{primary}. ' '. $rec->{email}. ' ( '.
-              join(' ', map $rec->{$_},
-                   qw( serial refresh retry expire minimumTTL ) ).
-              ' )',
+            'recdata' => ( $rectype =~ /^MX$/i
+                             ? $rec->{priority}. ' '. $rec->{host}
+                             : $rec->{host} ),
           } );
          my $error = $domain_record->insert;
          die $error if $error;
-        } else {
-          #die $dump->{$rectype};
-
-          my $datasub;
-          if ( $rectype =~ /^MX$/i ) {
-            $datasub = sub { $_[0]->{priority}. ' '. $_[0]->{host}; };
-          } elsif ( $rectype =~ /^TXT$/i ) {
-            $datasub = sub { $_[0]->{text}; };
-          } else {
-            $datasub = sub { $_[0]->{host}; };
-          }
-
-          foreach my $rec ( @{ $dump->{$rectype} } ) {
-            my $domain_record = new FS::domain_record( {
-              'svcnum' => $domain->svcnum,
-              'reczone' => $rec->{name},
-              'recaf' => $rec->{class} || 'IN',
-              'rectype' => $rectype,
-              'recdata' => &{$datasub}($rec),
-            } );
-            my $error = $domain_record->insert;
-            if ( $error ) {
-              warn "$error inserting ".
-                   $rec->{name}. ' . '. $domain->domain. "\n";
-              warn Dumper($rec);
-              #system('cat',"$main::prefix/$basefile.import");
-              die;
-            }
-          }
         }
       }
-    }
 
   #} else {
diff --git a/bin/dbdef-create b/bin/dbdef-create
index fea02c8c5..a449d67cc 100755
--- a/bin/dbdef-create
+++ b/bin/dbdef-create
@@ -2,7 +2,7 @@
 use strict;
 use DBI;
-use DBIx::DBSchema 0.26;
+use DBIx::DBSchema 0.22;
 use FS::UID qw(adminsuidsetup datasrc driver_name);
 
 my $user = shift or die &usage;
diff --git a/bin/fix-sequences b/bin/fix-sequences
index dc4abd751..2ff89d3e5 100755
--- a/bin/fix-sequences
+++ b/bin/fix-sequences
@@ -4,7 +4,7 @@
 use strict;
 
 use DBI;
-use DBIx::DBSchema 0.26;
+use DBIx::DBSchema 0.21;
 use DBIx::DBSchema::Table;
 use DBIx::DBSchema::Column;
 use DBIx::DBSchema::ColGroup::Unique;
diff --git a/bin/generate-raddb b/bin/generate-raddb
index af21c05a8..f946b05b3 100755
--- a/bin/generate-raddb
+++ b/bin/generate-raddb
@@ -1,7 +1,8 @@
 #!/usr/bin/perl
 
 # usage: generate-raddb radius-server/raddb/dictionary* >raddb.pm
-# i.e.:  generate-raddb ~/freeradius/freeradius-1.0.5/share/dictionary* ~/wirelessoceans/dictionary.ip3networks ~/wtxs/dictionary.mot.canopy >raddb.pm.new
+# i.e.:  generate-raddb ~/src/freeradius-0.2/raddb/dictionary* >FS/raddb.pm
+
 print <<END;
 package FS::raddb;
 use vars qw(%attrib);
@@ -28,24 +29,17 @@ while (<>) {
   #print "$2\n";
 }
 
-foreach ( sort keys %hash ) {
+foreach ( keys %hash ) {
 # print "$_\n" if length($_)>24;
 # print substr($_,0,24),"\n" if length($_)>24;
 # $max = length($_) if length($_)>$max;
   # have to fudge things since everything >24 is *not* unique
   #print "  '". substr($_,0,24). "' => '$hash{$_}',\n";
-  print "  '$_' ". ( " " x (24-length($_) ) ). "=> '$hash{$_}',\n";
+  print "  '$_' => '$hash{$_}',\n";
 }
 
 print <<END;
-
-  #NETC.NET.AU (RADIATOR?)
-  'authentication_type' => 'Authentication-Type',
-
-  #wtxs (dunno)
-  #'radius_operator' => 'Radius-Operator',
-
 );
 1;
diff --git a/bin/generate-table-module b/bin/generate-table-module
deleted file mode 100755
index fcc3f1d1f..000000000
--- a/bin/generate-table-module
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/perl
-
-use FS::Schema qw( dbdef_dist );
-
-my $table = shift;
-
-###
-# add a new FS/FS/table.pm
-###
-
-my %ut = ( #just guesses
-  'int' => 'number',
-  'number' => 'float',
-  'varchar' => 'text',
-  'text' => 'text',
-);
-
-my $dbdef_table = dbdef_dist->table($table)
-  or die "define table in Schema.pm first";
-my $primary_key = $dbdef_table->primary_key;
-
-open(SRC,"<eg/table_template.pm") or die $!;
--e "FS/FS/$table.pm" and die "FS/FS/$table.pm already exists!";
-open(DEST,">FS/FS/$table.pm") or die $!;
-
-while (my $line = <SRC>) {
-
-  $line =~ s/table_name/$table/g;
-
-  if ( $line =~ /^=item\s+field\s+-\s+description\s*$/ ) {
-
-    foreach my $column ( $dbdef_table->columns ) {
-      print DEST "=item $column - ";
-      print DEST "primary key"
-        if $column eq $primary_key;
-      print DEST "\n\n";
-    }
-    next;
-
-  } elsif ( $line=~ /^(\s*)\$self->ut_numbern\('primary_key'\)\s*/ ) {
-
-    print DEST "$1\$self->ut_numbern('$primary_key')\n"
-      if $primary_key;
-    next;
-
-  } elsif (
-    $line =~ /^(\s*)\|\|\s+\$self->ut_number\('validate_other_fields'\)\s*/
-  ) {
-
-    foreach my $column ( grep { $_ ne $primary_key } $dbdef_table->columns ) {
-      my $ut = $ut{$dbdef_table->column($column)->type};
-      $ut .= 'n' if $dbdef_table->column($column)->null;
-      print DEST "$1|| \$self->ut_$ut('$column')\n";
-    }
-    next;
-
-  }
-
-  print DEST $line;
-}
-
-close SRC;
-close DEST;
-
-###
-# add FS/t/table.t
-###
-
-open(TEST,">FS/t/$table.t") or die $!;
-print TEST <<ENDTEST;
-BEGIN { \$| = 1; print "1..1\\n" }
-END {print "not ok 1\\n" unless \$loaded;}
-use FS::$table;
-\$loaded=1;
-print "ok 1\\n";
-ENDTEST
-close TEST;
-
-###
-# add them to MANIFEST
-###
-
-system('cvs edit FS/MANIFEST');
-
-open(MANIFEST,">>FS/MANIFEST") or die $!;
-print MANIFEST "FS/$table.pm\n",
-               "t/$table.t\n";
-close MANIFEST;
-
diff --git a/bin/masonize b/bin/masonize
index 509ef3ec8..169ba718f 100755
--- a/bin/masonize
+++ b/bin/masonize
@@ -52,7 +52,7 @@ foreach $file ( split(/\n/, `find . -depth -print`) ) {
         $mode = 'html';
         next;
       }
-      die "unterminated <%= ??? (in $file):";
+      die 'unterminated <%= ???';
     } elsif ( $mode eq 'perlc' ) {
diff --git a/bin/pg-readonly b/bin/pg-readonly
deleted file mode 100644
index ad69fbde2..000000000
--- a/bin/pg-readonly
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/perl
-#
-# hack to update/add read-only permissions for a user on the db
-#
-# usage: pg-readonly freesideuser readonlyuser
-
-use strict;
-use DBI;
-use FS::UID qw(adminsuidsetup);
-use FS::Record qw(dbdef);
-
-my $user = shift or die &usage;
-my $rouser = shift or die &usage;
-
-my $dbh = adminsuidsetup $user;
-
-foreach my $table ( dbdef->tables ) {
-  $dbh->do("GRANT SELECT ON $table TO $rouser");
-  $dbh->commit();
-  if ( my $pkey = dbdef->table($table)->primary_key ) {
-    $dbh->do("GRANT SELECT ON ${table}_${pkey}_seq TO $rouser");
-    $dbh->commit();
-  }
-}
diff --git a/bin/postfix.export b/bin/postfix.export
index 61380da59..dbb08ceb9 100755
--- a/bin/postfix.export
+++ b/bin/postfix.export
@@ -40,7 +40,7 @@ foreach my $export ( @exports ) {
       my $srcsvc_acct = $svc_forward->srcsvc_acct;
       if ( $srcsvc_acct ) {
         ( $username, $domain ) = ( $srcsvc_acct->username, $srcsvc_acct->domain );
-      } elsif ( $svc_forward->src =~ /^([^@]*)\@([^@]+)$/ ) {
+      } elsif ( $svc_forward->src =~ /([^@]*)\@([^@]+)$/ ) {
         ( $username, $domain ) = ( $1, $2 );
       } else {
         die "bad svc_forward record? svcnum ". $svc_forward->svcnum. "\n";
diff --git a/bin/print-schema b/bin/print-schema
deleted file mode 100755
index 886e3250b..000000000
--- a/bin/print-schema
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/perl
-
-use DBIx::DBSchema;
-
-$l = load DBIx::DBSchema "/usr/local/etc/freeside/dbdef.DBI:Pg:dbname=freeside";
-
-print $l->pretty_print, "\n";
diff --git a/bin/rate.import b/bin/rate.import
index fdd756d72..29b5239d5 100755
--- a/bin/rate.import
+++ b/bin/rate.import
@@ -59,25 +59,15 @@ while ( my $row = $sth->fetchrow_hashref ) {
     $prefix = $2;
   }
 
-  my @rate_prefix = ();
-  if ( $prefix =~ /\d/ ) {
-
-    @rate_prefix = map {
-      #warn $row->{'Country'}. ": $prefixprefix$_\n";
-      new FS::rate_prefix {
-        'countrycode' => $row->{'Code'},
-        'npa' => $prefixprefix.$_,
-      };
-    }
-    split(/\s*[;,]\s*/, $prefix);
-
-  } else {
-    @rate_prefix = ( new FS::rate_prefix {
-      'countycode' => $row->{'Code'},
-      'npa' => '',
-    };
-    );
-  }
+  my @rate_prefix = map {
+    #warn $row->{'Country'}. ": $prefixprefix$_\n";
+    new FS::rate_prefix {
+      'countrycode' => $row->{'Code'},
+      'npa' => $prefixprefix.$_,
+    };
+  }
+  split(/\s*[;,]\s*/, $prefix);
+
   my $dest_detail = new FS::rate_detail {
     'ratenum' => $ratenum,
diff --git a/bin/rt-drop-tables b/bin/rt-drop-tables
deleted file mode 100755
index b027542b3..000000000
--- a/bin/rt-drop-tables
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/perl
-
-my @tables = qw(
-Attachments
-Queues
-Links
-Principals
-Groups
-ScripConditions
-Transactions
-Scrips
-ACL
-GroupMembers
-CachedGroupMembers
-Users
-Tickets
-ScripActions
-Templates
-TicketCustomFieldValues
-CustomFields
-CustomFieldValues
-sessions
-);
-
-foreach my $table ( @tables ) {
-  print "drop table $table;\n";
-  print "drop sequence ${table}_id_seq;\n";
-}
-
diff --git a/bin/slony-setup b/bin/slony-setup
deleted file mode 100755
index b384bb9f7..000000000
--- a/bin/slony-setup
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/perl
-#
-# hack to update/add read-only permissions for a user on the db
-#
-# usage: pg-readonly freesideuser readonlyuser
-
-use strict;
-use DBI;
-use FS::UID qw(adminsuidsetup);
-use FS::Record qw(dbdef);
-
-my $user = shift or die &usage;
-adminsuidsetup($user);
-
-#---
-
-#su postgres -c 'createlang plpgsql freeside'
-
-#---
-
-my $MASTERHOST = '172.21.0.204';
-my $SLAVEHOST = '172.21.0.205';
-#my $REPLICATIONUSER='pgsql';
-my $REPLICATIONUSER='postgres';
-
-#drop set ( id = 1, origin = 1);
-
-print <<END;
-cluster name = freeside;
-node 1 admin conninfo = 'dbname=freeside host=$MASTERHOST user=$REPLICATIONUSER';
-node 2 admin conninfo = 'dbname=freeside host=$SLAVEHOST user=$REPLICATIONUSER';
-init cluster ( id=1, comment = 'Master Node');
-create set (id=1, origin=1, comment='All freeside tables');
-
-END
-
-my $id = 1;
-
-foreach my $table ( dbdef->tables ) {
-  #next if $table =~ /^sql_/i;
-  print "set add table (set id=1, origin=1, id=". $id++. ", fully qualified name = 'public.$table' );\n";
-
-}
-
-print <<END;
-store node (id=2, comment = 'Slave node');
-store path (server = 1, client = 2, conninfo='dbname=freeside host=$MASTERHOST user=$REPLICATIONUSER');
-store path (server = 2, client = 1, conninfo='dbname=freeside host=$SLAVEHOST user=$REPLICATIONUSER');
-store listen (origin=1, provider = 1, receiver =2);
-store listen (origin=2, provider = 2, receiver =1);
-END
-
-
diff --git a/bin/svc_broadband.renumber b/bin/svc_broadband.renumber
deleted file mode 100755
index 980fa0099..000000000
--- a/bin/svc_broadband.renumber
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-
-use FS::UID qw(adminsuidsetup);
-use FS::Record qw(qsearch qsearchs);
-use FS::svc_Common;
-use FS::part_svc_router;
-use FS::svc_broadband;
-use FS::router;
-use FS::addr_block;
-
-$FS::svc_Common::noexport_hack = 1; #Disable exports!
-
-my $user = shift if $ARGV[0] or die &usage;
-adminsuidsetup($user);
-
-my $remapfile = shift if $ARGV[0] or die &usage;
-my $old_blocknum = shift if $ARGV[0] or die &usage;
-my $new_blocknum = shift if $ARGV[0] or die &usage;
-my $old_svcnum = shift if $ARGV[0];
-
-my %ipmap;
-
-open(REMAP, "<$remapfile") or die $!;
-while (<REMAP>) {
-  next unless (/^([0-9\.]+)\s+([0-9\.]+)$/);
-  my ($old_ip, $new_ip) = ($1, $2);
-  $ipmap{$old_ip} = $new_ip;
-}
-close(REMAP);
-
-my @svcs;
-if ($old_svcnum) {
-  @svcs = ( qsearchs('svc_broadband', { svcnum => $old_svcnum,
-                                        blocknum => $old_blocknum }) );
-} else {
-  @svcs = qsearch('svc_broadband', { blocknum => $old_blocknum });
-}
-
-foreach my $old_sb (@svcs) {
-
-  my $old_ip = $old_sb->ip_addr;
-  my $new_ip = $ipmap{$old_ip};
-  print "Renumbering ${old_ip} (${old_blocknum}) => ${new_ip} (${new_blocknum})...\n";
-
-
-  my $new_sb = new FS::svc_broadband
-    { $old_sb->hash,
-      ip_addr => $new_ip,
-      blocknum => $new_blocknum,
-      svcpart => $old_sb->cust_svc->svcpart,
-    };
-
-  my $error = $new_sb->replace($old_sb);
-  die $error if $error;
-
-}
-
-
-
-exit(0);
-
-sub usage {
-
-  my $usage = <<EOT;
-Usage:
-  svc_broadband.renumber user remapfile old_blocknum new_blocknum [ svcnum ]
-
-remapfile format:
-old_ip_address new_ip_address
-...
-
-Example remapfile:
-10.0.0.5 192.168.0.5
-10.0.0.20 192.168.0.20
-10.0.0.32 192.168.0.3
-
-Warning: This assumes your routers have already been reconfigured with the
-         new addresses. Exports will not be run!
-
-EOT
-
-}