DBMS_DATAPUMP - import over a DB link from an on-prem Oracle DB to AWS Oracle RDS
Direct DBMS_DATAPUMP Import using DB link
1. Create DB Link in target Oracle DB.
-- Create the link in the TARGET (RDS) database, pointing back at the on-prem source.
-- NOTE(review): credentials are embedded in plain text and HOST is a placeholder;
-- substitute real values and keep real passwords out of version control.
CREATE DATABASE LINK TESTDL CONNECT TO TEST identified by TEST123
USING '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=1....)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=TESTDB)))';
2. Validate the DB link.
-- Sanity-check the link by running a query on the remote side.
-- NOTE(review): DBA_DB_LINKS@TESTDL requires DBA-level privileges in the source DB;
-- SELECT * FROM dual@TESTDL works for any account if this fails on privileges.
select * from dba_db_links@TESTDL;
3. Run the block below to import the schema directly from on-prem to the target.
DECLARE
  l_dp_handle  NUMBER;          -- Data Pump job handle from DBMS_DATAPUMP.OPEN
  v_job_state  VARCHAR2(4000);  -- final state reported by WAIT_FOR_JOB (e.g. COMPLETED)
BEGIN
  -- Network-mode import: data is pulled straight over the DB link, no dump file.
  l_dp_handle := DBMS_DATAPUMP.open(operation   => 'IMPORT',
                                    job_mode    => 'SCHEMA',
                                    remote_link => 'TESTDL',
                                    version     => 'LATEST');

  -- Only a log file is attached; reusefile => 1 overwrites any leftover log.
  DBMS_DATAPUMP.add_file(handle    => l_dp_handle,
                         filename  => 'test.log',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE,
                         reusefile => 1);

  -- Restrict the import to the TEST schema (value is a quoted name list).
  DBMS_DATAPUMP.METADATA_FILTER(l_dp_handle, 'SCHEMA_LIST', '''TEST''');

  DBMS_DATAPUMP.start_job(l_dp_handle);
  DBMS_DATAPUMP.WAIT_FOR_JOB(l_dp_handle, v_job_state);
  DBMS_OUTPUT.PUT_LINE(v_job_state);
EXCEPTION
  WHEN OTHERS THEN
    -- Detach/stop the job so it does not linger in DBA_DATAPUMP_JOBS, then re-raise.
    DBMS_DATAPUMP.stop_job(l_dp_handle);
    RAISE;
END;
/
4. Monitor the log in AWS RDS.
-- List DATA_PUMP_DIR on the RDS instance so the newest files appear last.
SELECT *
FROM TABLE(rdsadmin.rds_file_util.listdir(p_directory => 'DATA_PUMP_DIR'))
ORDER BY mtime;

-- Read the Data Pump log file line by line.
SELECT *
FROM TABLE(rdsadmin.rds_file_util.read_text_file(
               p_directory => 'DATA_PUMP_DIR',
               p_filename  => 'test.log'));
5. Run the block below to import only metadata (no table rows).
DECLARE
  l_dp_handle  NUMBER;          -- Data Pump job handle from DBMS_DATAPUMP.OPEN
  v_job_state  VARCHAR2(4000);  -- final state reported by WAIT_FOR_JOB (e.g. COMPLETED)
BEGIN
  -- Metadata-only network import of the TEST schema over the DB link.
  l_dp_handle := DBMS_DATAPUMP.open(operation   => 'IMPORT',
                                    job_mode    => 'SCHEMA',
                                    remote_link => 'TESTDL',
                                    version     => 'LATEST');

  -- reusefile => 1 restored (was commented out) for consistency with the other
  -- blocks: it lets a rerun overwrite an existing test.log.
  DBMS_DATAPUMP.add_file(handle    => l_dp_handle,
                         filename  => 'test.log',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE,
                         reusefile => 1);

  -- Restrict the import to the TEST schema (value is a quoted name list).
  DBMS_DATAPUMP.METADATA_FILTER(l_dp_handle, 'SCHEMA_LIST', '''TEST''');

  -- INCLUDE_ROWS = 0 skips table data: only object definitions are imported.
  DBMS_DATAPUMP.data_filter(l_dp_handle, name => 'INCLUDE_ROWS', value => 0);

  DBMS_DATAPUMP.start_job(l_dp_handle);
  DBMS_DATAPUMP.WAIT_FOR_JOB(l_dp_handle, v_job_state);
  DBMS_OUTPUT.PUT_LINE(v_job_state);
EXCEPTION
  WHEN OTHERS THEN
    -- Detach/stop the job so it does not linger in DBA_DATAPUMP_JOBS, then re-raise.
    DBMS_DATAPUMP.stop_job(l_dp_handle);
    RAISE;
END;
/
6. Run below to import only tables
DECLARE
  l_dp_handle  NUMBER;          -- Data Pump job handle from DBMS_DATAPUMP.OPEN
  v_job_state  VARCHAR2(4000);  -- final state reported by WAIT_FOR_JOB (e.g. COMPLETED)
BEGIN
  -- Network-mode import limited to specific tables of the TEST schema.
  l_dp_handle := DBMS_DATAPUMP.open(operation   => 'IMPORT',
                                    job_mode    => 'SCHEMA',
                                    remote_link => 'TESTDL',
                                    version     => 'LATEST');

  DBMS_DATAPUMP.add_file(handle    => l_dp_handle,
                         filename  => 'OTHER_TABLE.log',
                         directory => 'DATA_PUMP_DIR',
                         filetype  => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE,
                         reusefile => 1);

  -- Restrict the import to the TEST schema (value is a quoted name list).
  DBMS_DATAPUMP.METADATA_FILTER(l_dp_handle, 'SCHEMA_LIST', '''TEST''');

  -- Import only TABLE objects...
  dbms_datapump.metadata_filter ( handle => l_dp_handle, name => 'INCLUDE_PATH_EXPR', value => q'[= 'TABLE']' );

  -- ...and of those, only the TEST and TEST1 tables.
  -- (Previous comment said "EMPLOYEES", which did not match the filter values.)
  dbms_datapump.metadata_filter ( handle => l_dp_handle, name => 'NAME_LIST', value => q'['TEST','TEST1']', object_path => 'TABLE' );

  -- Drop and recreate any of these tables that already exist in the target.
  dbms_datapump.set_parameter (handle => l_dp_handle, name => 'TABLE_EXISTS_ACTION', value => 'REPLACE' );

  DBMS_DATAPUMP.start_job(l_dp_handle);
  DBMS_DATAPUMP.WAIT_FOR_JOB(l_dp_handle, v_job_state);
  DBMS_OUTPUT.PUT_LINE(v_job_state);
EXCEPTION
  WHEN OTHERS THEN
    -- Detach/stop the job so it does not linger in DBA_DATAPUMP_JOBS, then re-raise.
    DBMS_DATAPUMP.stop_job(l_dp_handle);
    RAISE;
END;
/
7. EXPDP with a QUERY clause - export only the last 10 days of data from one table.
# Export only rows from the last 10 days of TEST.ATTRIBUTE.
# Fixes from the original: removed the stray trailing double-quote, and escaped
# '>' as '\>' - inside backslash-escaped quotes the bare '>' is NOT quoted and
# the shell would treat it as output redirection.
expdp dumpfile=test_bkp_12DEC24.dmp logfile=test_bkp_12DEC24.log TABLES=TEST.attribute query=test.attribute:\"where createdate \> sysdate-10\"
Comments
Post a Comment