Datapump Export to S3/Glacier

Another script, documented here more for preservation than anything else.

Prerequisites

  • EC2 instance with Oracle XE installed
  • Access to the RDS databases via tnsnames aliases; in my case all aliases take the form DBNAME_AWS (example entry below)
  • AWS CLI on the instance, with permissions for SSM Parameter Store (the passwords live there as SecureStrings) and the target S3 bucket
  • sendmail configured, for the status email
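
For reference, a tnsnames.ora entry of the shape the script expects; the host and service name here are placeholders rather than values from the original setup:

MULTIDEV_AWS =
  (DESCRIPTION =
    (ADDRESS = (PROTOCOL = TCP)(HOST = multidev.example.eu-west-1.rds.amazonaws.com)(PORT = 1521))
    (CONNECT_DATA = (SERVICE_NAME = MULTIDEV))
  )
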
#!/bin/ksh
#
# Shell script for Archive of Databases to Glacier
# N Merchant
# 26 Jul 2018
#
# Usage: database_archives.ksh DBNAME   e.g. database_archives.ksh MULTIDEV
#=============================================================================

MAIL_LIST="user@mail.com"


DT=$(date "+%Y%m%d")
FD=/home/oracle/backup/$1

# Abort if no database name was supplied (protects the rm -rf below)
if [ -z "$1" ]; then
  echo "Usage: database_archives.ksh DBNAME" >&2
  exit 1
fi

echo "CLEAR AND CREATE DIRECTORY"

rm -rf $FD
mkdir $FD

cd /home/oracle/bin
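
# The passwords below are assumed to have been stored beforehand as
# SecureString parameters, e.g.:
#   aws ssm put-parameter --region <region> --name export-user-password \
#       --type SecureString --value '<password>'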

# Fetch export_user_password from parameter store
export_user_password=$(aws ssm get-parameter \
    --region <region> \
    --name export-user-password \
    --with-decryption \
    --query Parameter.Value \
    --output text)

# Fetch system_password from parameter store for the XE Instance
system_password=$(aws ssm get-parameter \
    --region <region> \
    --name oracle-xe-password \
    --with-decryption \
    --query Parameter.Value \
    --output text)

# Fetch oracle_dba_password from parameter store; it is used for the RDS-side
# sessions below (the parameter name here is an assumption)
oracle_dba_password=$(aws ssm get-parameter \
    --region <region> \
    --name oracle-dba-password \
    --with-decryption \
    --query Parameter.Value \
    --output text)

echo "CAPTURE SCN"

# Capture the current SCN (v$database.current_scn)
x=$(sqlplus -s oracle_dba/${oracle_dba_password}@$1_AWS << EOF | tee -a $FD/$1_SCN.log
set heading off
set underline off
set termout off
set trimspool on
set pagesize 0
select trim(to_char(current_scn,'9999999999999')) from v\$database;
EOF
)


echo "DATAPUMP EXPORT"

# Datapump Export
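# flashback_scn=$x (captured above) pins the whole export to a single SCN,
# so the full=y dump is a transactionally consistent snapshot.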

dp="expdp export_user/${export_user_password}@$1_AWS directory=DATA_PUMP_DIR dumpfile=$1_$DT.dmp logfile=$1_$DT.log flashback_scn=$x full=y"

$dp

echo "QUERY LOGFILES"

# Spool the expdp logfile back via rdsadmin.rds_file_util and count ORA- errors,
# discounting the ORA-31693/ORA-01031 pairs raised for SYS-owned objects (expected
# on RDS, where the export user lacks the necessary privileges); also record the
# dumpfile size. The IDORA*/IDSIZ markers are grepped out of the local log later.

sqlplus -s oracle_dba/${oracle_dba_password}@$1_AWS << EOF | tee -a $FD/$1_$DT.log
set serveroutput on
declare

v_knt number := 0;
v_fs number;
v_knt2 number := 0;
v_knt3 number := 0;

cursor txt is
select text from table(rdsadmin.rds_file_util.read_text_file (p_directory => 'DATA_PUMP_DIR',  p_filename  => '$1_$DT.log'));

begin
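-- Each skipped SYS object produces an ORA-31693 line followed by an ORA-01031
-- line, so the pair count is doubled to get the number of ORA- lines to ignore.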

select count(*)*2 into v_knt2
  from (
select rownum
     , text
     , lead(text) over (order by rownum) next_text
from table(rdsadmin.rds_file_util.read_text_file (p_directory => 'DATA_PUMP_DIR',  p_filename  => '$1_$DT.log'))
where  text like '%ORA-%'
order by rownum)
where text || ' ' || next_text like '%ORA-31693%SYS%ORA-01031%';


select filesize into v_fs from table(RDSADMIN.RDS_FILE_UTIL.LISTDIR('DATA_PUMP_DIR')) where filename='$1_$DT.dmp';

dbms_output.enable(null); -- null = unlimited buffer; a full export log can exceed a fixed one

for i in txt loop

  if i.text like '%ORA-%' then
    v_knt := v_knt+1;
  end if;

  dbms_output.put_line(i.text);

end loop;

v_knt3 := v_knt-v_knt2;

dbms_output.put_line('IDORA:' || v_knt);
dbms_output.put_line('IDORA2:' || v_knt2);
dbms_output.put_line('IDSIZ:' || v_fs);
dbms_output.put_line('IDORA3:' || v_knt3);


end;
/
exit;
EOF

echo "CREATE DATABASE LINK"

# Create Database Link from XE to TARGET PLUS CREATE THE DATA_PUMP_DIR
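# There is no OS access to the RDS host, so the dumpfile has to be pulled over a
# database link with DBMS_FILE_TRANSFER.GET_FILE (next step); DPUMP_$1 is the
# local landing directory.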
sqlplus -s system/${system_password}@XE << EOF | tee -a $FD/$1_DBLINK_DPDIR.log
define pwd = ${oracle_dba_password}
-- drop any link left over from a previous run; the error this raises on the
-- first run is harmless
drop public database link "$1";
create public database link "$1"
connect to oracle_dba
identified by &pwd
using '$1_AWS';

create or replace directory DPUMP_$1 as '/home/oracle/backup/$1';
grant read, write on directory DPUMP_$1 to public;
exit;
EOF

echo "COPY EXPORT DUMPFILE"

# Copy Down the Export Dumpfile
sqlplus -s system/${system_password}@XE << EOF | tee -a $FD/$1_FILECOPY.log
BEGIN
  DBMS_FILE_TRANSFER.GET_FILE(
      source_directory_object      => 'DATA_PUMP_DIR',
      source_file_name             => '$1_$DT.dmp',
      destination_directory_object => 'DPUMP_$1',
      destination_file_name        => '$1_$DT.dmp',
      source_database              => '$1');
END;
/
exit;
EOF

echo "CHECK ERRORS IN LOGFILES"

# Check for Errors in Export and Filesize
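# The IDORA3/IDSIZ markers were emitted by the PL/SQL block above; pull the
# adjusted error count and the RDS-side filesize back out of the local logfile.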
vORA=$(grep -oP '(?<=IDORA3:)[0-9]+' $FD/$1_$DT.log)
vSIZ=$(grep -oP '(?<=IDSIZ:)[0-9]+' $FD/$1_$DT.log)
vPRES=$(ls $FD/*.dmp 2>/dev/null | wc -l)
vCOMP=$(stat -c%s "$FD/$1_$DT.dmp")


# Check number of Oracle errors
if [ "${vORA:-0}" -gt 0 ]; then
   vSTAT1="ERROR (ORA- $vORA)"
else
   vSTAT1="SUCCESS (ORA $vORA)"
fi

# Check the dumpfile is present
if [ "$vPRES" -eq 1 ]; then
  vPRESOUT="Ok"
else
  vPRESOUT="NotOk"
fi

# Check the RDS-side and local filesizes match
if [ "${vSIZ:-0}" = "$vCOMP" ]; then
   vSIZOUT="Ok"
else
  vSIZOUT="NotOk"
fi

vOUTCOME=$vPRESOUT$vSIZOUT


echo "COMPLETE STATUS, TAR AND FILE REMOVAL"

# Tar and upload the files even if ORA- errors were counted, as they may be
# non-fatal ones; then remove the dumpfile and logfile from the RDS side
if [ "$vOUTCOME" = "OkOk" ]; then

        tar czf $FD/$1_$DT.tar.gz -C $FD --exclude=$1_$DT.tar.gz .
        aws s3 cp $FD/$1_$DT.tar.gz s3://ee-development-db-backups
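
        # The upload lands in S3 Standard; the Glacier leg of the archive is
        # assumed to be handled by a lifecycle rule on the bucket, e.g.:
        #   aws s3api put-bucket-lifecycle-configuration \
        #       --bucket ee-development-db-backups \
        #       --lifecycle-configuration '{"Rules":[{"ID":"ToGlacier",
        #         "Status":"Enabled","Filter":{"Prefix":""},
        #         "Transitions":[{"Days":1,"StorageClass":"GLACIER"}]}]}'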

sqlplus -s oracle_dba/${oracle_dba_password}@$1_AWS << EOF | tee -a $FD/$1_REMDUMP.log
exec utl_file.fremove('DATA_PUMP_DIR','$1_$DT.dmp');
exec utl_file.fremove('DATA_PUMP_DIR','$1_$DT.log');
exit;
EOF

   vSTAT2="SUCCESS (FILE $vPRESOUT SIZE $vSIZOUT)"

else

   vSTAT2="ERROR (FILE $vPRESOUT SIZE $vSIZOUT)"

fi


echo "SEND EMAIL"

# Send Mail


v_body=$(cat $FD/$1_$DT.log)


sendmail -oi -t <<EOF
To: $MAIL_LIST
Subject: $1 GLACIER $vSTAT1 $vSTAT2

********NOTE: ORA- ERRORS RELATING TO SYS OBJECTS WITH INSUFFICIENT PRIVILEGES HAVE BEEN IGNORED********
$v_body

EOF
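
Scheduling is left to cron on the EC2 instance. An illustrative crontab entry, one per database (the timing is an assumption, not part of the original setup):

0 2 * * 6 /home/oracle/bin/database_archives.ksh MULTIDEV >> /tmp/MULTIDEV_archive.out 2>&1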