
Note: All the posts are based on a practical approach, avoiding lengthy theory. All have been tested on development servers. Please don't test any post on a production server until you are sure.

Sunday, November 28, 2010

One-Way SCHEMA Level Streams Replication

In a nutshell, replication using Oracle Streams works as follows:
1. A background capture process is configured to capture changes made to tables, schemas, or the entire database. The capture process captures changes from the redo log and formats each captured change into a logical change record (LCR). It uses LogMiner to mine the redo/archive logs when building LCRs.
2. The capture process enqueues the LCR events into a specified queue.
3. The queue is scheduled to propagate the events to another queue, in a different database.
4. A background apply process dequeues the events and applies them at the destination database.
For a Streams overview and the configuration required before using it, please see Streams 10gR2 - How to.
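
Before creating anything, it is worth a quick sanity check that the source database meets the usual Streams prerequisites covered in that post (archive log mode, a Streams pool, GLOBAL_NAMES, job queue processes). This is only a sketch; adjust the expected values to your environment:

    conn sys/homedev@homedev as sysdba

    -- capture mines the redo/archive logs, so the source should be in ARCHIVELOG mode
    SELECT log_mode FROM v$database;

    -- memory for the buffered queues/LogMiner, link naming, and the propagation job
    show parameter streams_pool_size
    show parameter global_names
    show parameter job_queue_processes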

Please set up your environment. Mine is set up as follows; keep this information in mind while performing the steps:
 HOMEDEV  Global database name of the source (capture) site
 ASMDB    Global database name of the target (apply) site


 On HOMEDEV  (SOURCE/CAPTURE)
Create the user for Streams administration:
       conn sys/homedev@homedev as sysdba

       create user STRMADMIN identified by STRMADMIN;

       ALTER USER STRMADMIN DEFAULT TABLESPACE USERS
       TEMPORARY TABLESPACE TEMP
       QUOTA UNLIMITED ON USERS;

      GRANT CONNECT, RESOURCE, AQ_ADMINISTRATOR_ROLE, DBA TO STRMADMIN;

      execute DBMS_STREAMS_AUTH.GRANT_ADMIN_PRIVILEGE('STRMADMIN');

      SELECT * FROM global_name;
      HOMEDEV.REGRESS.RDBMS.DEV.US.ORACLE.COM

 On ASMDB  (TARGET/APPLY)
Connect as SYS, create the Streams administrator and the tablespaces needed for the HR objects (the HR sample schema uses the EXAMPLE tablespace, so it should exist before the import in Step 6), then set the global name:

    create user STRMADMIN identified by STRMADMIN;

    create tablespace users datafile '+DB_DATA/asmdb/datafile/USERS.dbf' size 200M;

    create tablespace EXAMPLE datafile '+DB_DATA/asmdb/datafile/EXAMPLE.dbf' size 200M;

    ALTER USER STRMADMIN DEFAULT TABLESPACE USERS
    TEMPORARY TABLESPACE TEMP
    QUOTA UNLIMITED ON USERS;

    GRANT CONNECT, RESOURCE, AQ_ADMINISTRATOR_ROLE, DBA TO STRMADMIN;

    execute DBMS_STREAMS_AUTH.GRANT_ADMIN_PRIVILEGE('STRMADMIN');

    ALTER DATABASE RENAME GLOBAL_NAME TO ASMDB.REGRESS.RDBMS.DEV.US.ORACLE.COM;
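
A quick check that the rename took effect (the Streams rules and database links below all reference this global name):

    SELECT * FROM global_name;
    ASMDB.REGRESS.RDBMS.DEV.US.ORACLE.COM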


/* STEP 1.- Create the streams queue and the database links that will be used for propagation. */
        connect STRMADMIN/STRMADMIN@HOMEDEV

        BEGIN
            DBMS_STREAMS_ADM.SET_UP_QUEUE(
            queue_table => 'STREAMS_QUEUE_TABLE',
            queue_name  => 'STREAMS_QUEUE',
            queue_user  => 'STRMADMIN');
       END;
      /

     conn sys/homedev@homedev as sysdba 

     create public database link ASMDB.REGRESS.RDBMS.DEV.US.ORACLE.COM using 'ASMDB';

    connect STRMADMIN/STRMADMIN@HOMEDEV

    create database link ASMDB.REGRESS.RDBMS.DEV.US.ORACLE.COM connect to strmadmin
     identified by strmadmin;
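
Before going further, a simple way to confirm the link works is to query the remote global name over it (as STRMADMIN on HOMEDEV); it should return the target's global name:

    SELECT * FROM global_name@ASMDB.REGRESS.RDBMS.DEV.US.ORACLE.COM;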
/* STEP 2.- Connect as the Streams administrator at the target site ASMDB and create the Streams queue */
    connect STRMADMIN/STRMADMIN@ASMDB
   BEGIN
     DBMS_STREAMS_ADM.SET_UP_QUEUE(
     queue_table => 'STREAMS_QUEUE_TABLE',
     queue_name  => 'STREAMS_QUEUE',
     queue_user  => 'STRMADMIN');
  END;
  /
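
Optionally, verify on each site that the queue and queue table exist (the same query works on HOMEDEV and ASMDB):

    SELECT name, queue_table, enqueue_enabled, dequeue_enabled
    FROM   dba_queues
    WHERE  owner = 'STRMADMIN';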

/* STEP 3.- Add apply rules for the Schema at the destination database  */
BEGIN
   DBMS_STREAMS_ADM.ADD_SCHEMA_RULES(
     schema_name     => 'HR',
     streams_type    => 'APPLY',
     streams_name    => 'STREAM_APPLY',
     queue_name      => 'STRMADMIN.STREAMS_QUEUE',
     include_dml     => true,
     include_ddl     => true,
     source_database => 'HOMEDEV.REGRESS.RDBMS.DEV.US.ORACLE.COM');
END;
/
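
ADD_SCHEMA_RULES implicitly creates the apply process along with its rules. You can confirm it exists (it remains DISABLED until Step 8):

    SELECT apply_name, queue_name, status FROM dba_apply;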

/* STEP 4.- Add capture rules for the schema HR at the source database */
CONN STRMADMIN/STRMADMIN@HOMEDEV
BEGIN
  DBMS_STREAMS_ADM.ADD_SCHEMA_RULES(
    schema_name     => 'HR',
    streams_type    => 'CAPTURE',
    streams_name    => 'STREAM_CAPTURE',
    queue_name      => 'STRMADMIN.STREAMS_QUEUE',
    include_dml     => true,
    include_ddl     => true,
    source_database => 'HOMEDEV.REGRESS.RDBMS.DEV.US.ORACLE.COM');
END;
/
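
Likewise, the capture process is created on the source in a DISABLED state; a quick check:

    SELECT capture_name, queue_name, status FROM dba_capture;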

/* STEP 5.- Add propagation rules for the schema HR at the source database. This step will also create a propagation job to the destination database */
BEGIN
   DBMS_STREAMS_ADM.ADD_SCHEMA_PROPAGATION_RULES(
     schema_name            => 'HR',
     streams_name           => 'STREAM_PROPAGATE',
     source_queue_name      => 'STRMADMIN.STREAMS_QUEUE',
     destination_queue_name => 'STRMADMIN.STREAMS_QUEUE@ASMDB.REGRESS.RDBMS.DEV.US.ORACLE.COM',
     include_dml            => true,
     include_ddl            => true,
     source_database        => 'HOMEDEV.REGRESS.RDBMS.DEV.US.ORACLE.COM');
END;
/
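
To confirm the propagation and the database link it will use, query DBA_PROPAGATION (adjust the column list if your release differs):

    SELECT propagation_name, source_queue_name,
           destination_queue_name, destination_dblink
    FROM   dba_propagation;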

/* STEP 6.- Export, import and instantiation of the schema from the source to the destination database. If the objects are not yet present in the destination database, export them from the source and import them into the destination.
Export from the source database:
Specify OBJECT_CONSISTENT=Y on the export command.
This makes the export consistent for each individual object as of a particular system change number (SCN). */
D:\Home_Backup\STREAMTEST_EXP>exp userid=system/manager@homedev owner=HR file=hr.dmp log=hr_exp.log object_consistent=Y statistics=NONE

/* Import into the Destination Database:
Specify STREAMS_INSTANTIATION=Y clause in the import command.
By doing this, the streams metadata is updated with the appropriate information in the destination database corresponding to the SCN that is recorded in the export file  */
D:\Home_Backup\STREAMTEST_EXP>imp userid=system/manager@asmdb full=Y constraints=Y file=hr.dmp ignore=y commit=y log=hr_imp.log streams_instantiation=y
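
After the import completes you can confirm on ASMDB that instantiation SCNs were recorded for the HR objects; if nothing shows up, the SCN can also be set manually with DBMS_APPLY_ADM.SET_SCHEMA_INSTANTIATION_SCN:

    conn strmadmin/strmadmin@asmdb

    SELECT source_object_owner, source_object_name, instantiation_scn
    FROM   dba_apply_instantiated_objects
    WHERE  source_object_owner = 'HR';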

/* STEP 7.- Specify an APPLY USER at the destination database.
This is the user that will apply all DML and DDL statements.
The user specified in the APPLY_USER parameter must have the necessary privileges to perform DML and DDL changes on the apply objects. */
conn strmadmin/strmadmin@asmdb

BEGIN
  DBMS_APPLY_ADM.ALTER_APPLY(
    apply_name => 'STREAM_APPLY',
    apply_user => 'HR');
END;
/
/* STEP 8.- Set the apply parameter DISABLE_ON_ERROR to 'N' so the apply process does not abort on the first error; then start the apply process on the destination */
conn strmadmin/strmadmin@asmdb
 

BEGIN
  DBMS_APPLY_ADM.SET_PARAMETER(
    apply_name => 'STREAM_APPLY',
    parameter  => 'disable_on_error',
    value      => 'n');
END;
/
DECLARE
   v_started number;
BEGIN
   SELECT decode(status, 'ENABLED', 1, 0) INTO v_started
   FROM DBA_APPLY WHERE APPLY_NAME = 'STREAM_APPLY';
   if (v_started = 0) then
      DBMS_APPLY_ADM.START_APPLY(apply_name => 'STREAM_APPLY');
   end if;
END;
/
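
Once apply is running, any transactions that fail are recorded in the error queue; check it periodically on ASMDB:

    SELECT apply_name, local_transaction_id, error_number, error_message
    FROM   dba_apply_error;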

/* STEP 9.- Configure capture to retain 7 days' worth of LogMiner checkpoint information, then start the capture process on the source */

conn strmadmin/strmadmin@homedev

BEGIN
  DBMS_CAPTURE_ADM.ALTER_CAPTURE(
    capture_name              => 'STREAM_CAPTURE',
    checkpoint_retention_time => 7);
END;
/

begin
  DBMS_CAPTURE_ADM.START_CAPTURE(capture_name => 'STREAM_CAPTURE');
end;
/
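
Once started, the capture process state can be monitored on the source; after the initial dictionary build it normally reports states such as CAPTURING CHANGES:

    SELECT capture_name, state FROM v$streams_capture;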
/* Perform changes in tables belonging to HR on the source site and check that these are applied on the destination */

conn HR/HR@homedev
insert into HR.DEPARTMENTS values (991,'OTHER',205,1700);
commit;
alter table HR.EMPLOYEES add (NEWCOL VARCHAR2(10));
create table hr.t(id number);
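
Give propagation and apply a few moments, then verify on the destination that the row, the new column and the new table have arrived:

    conn HR/HR@asmdb

    SELECT * FROM departments WHERE department_id = 991;
    DESC employees
    SELECT table_name FROM user_tables WHERE table_name = 'T';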

Related Posts:
Streams 10gR2 - How to
Remove the local Streams configuration 
