Currently, I am trying to bulk insert a large amount of data (~500,000 rows) across a database link. I am grabbing the data from materialized views. I was going to add indexes, but I read somewhere that this would actually slow the process down. After I insert the rows, I grab the unique IDs and insert them into a flagging table so that they are marked as 'inserted' and aren't inserted again. However, this process has been stuck for about 30 minutes now. Is there a better way to do this? (My code is below.)
create or replace PROCEDURE SEND_DATA
IS
   CURSOR cursora IS
      SELECT DISTINCT unique_id_1
        FROM mv_1;

   CURSOR cursorb IS
      SELECT DISTINCT unique_id_2
        FROM mv_2;

   ca cursora%ROWTYPE;
   cb cursorb%ROWTYPE;

   sent_flag NUMBER(10);
BEGIN
   SELECT flag_id
     INTO sent_flag
     FROM flag f
    WHERE f.flag_tx = 'SENT';
---
   -- Clear out remote rows that are about to be re-sent, matching on
   -- day + code and on type + timestamp.
   DELETE FROM TABLE1@db1
    WHERE (TRUNC(LOCAL_TIMESTAMP), code) IN
          (SELECT DISTINCT TRUNC(LOCAL_TIME_TS), code FROM MV_1);
   COMMIT;

   DELETE FROM TABLE1@db1
    WHERE (type, timestamp) IN
          (SELECT DATA_Type_TX, UTC_TS FROM MV_1);
   COMMIT;
   INSERT INTO TABLE1@db1 (DATE, TYPE, VALUE, LAST_UPDATE, FLAG, LOCAL_TIMESTAMP)
   SELECT DATA_DATE, NAME, VALUE, SYSDATE, NULL, LOCAL_TIME
     FROM MV_2;
   COMMIT;
   -- Flag every id that was just sent so it is not picked up again.
   OPEN cursora;
   LOOP
      FETCH cursora INTO ca;
      EXIT WHEN cursora%NOTFOUND;

      INSERT INTO flag (
         SUBMIT_ID,
         FLAG_ID,
         CREATE_USER_ID,
         CREATE_DT)
      VALUES (
         ca.unique_id_1,
         sent_flag,
         '1',
         SYSDATE);
   END LOOP;
   CLOSE cursora;
   COMMIT;
---
EXCEPTION
   WHEN OTHERS THEN
      RAISE;
END SEND_DATA;
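
For reference, the flag-table step could also be done with a single set-based INSERT ... SELECT inside the procedure instead of the row-by-row cursor loop. This is only a sketch: it assumes unique_id_1 from mv_1 is the value that belongs in flag.SUBMIT_ID (as in the loop above), reuses the sent_flag variable, and adds a NOT EXISTS check so ids that are already flagged get skipped.

-- Sketch of a set-based replacement for the cursor loop.
-- Assumes unique_id_1 maps to SUBMIT_ID; the NOT EXISTS guard is an
-- extra safety check, not part of the original procedure.
INSERT INTO flag (SUBMIT_ID, FLAG_ID, CREATE_USER_ID, CREATE_DT)
SELECT DISTINCT m.unique_id_1, sent_flag, '1', SYSDATE
  FROM mv_1 m
 WHERE NOT EXISTS (SELECT 1
                     FROM flag f
                    WHERE f.SUBMIT_ID = m.unique_id_1
                      AND f.FLAG_ID   = sent_flag);
COMMIT;

A single INSERT ... SELECT lets the database do one bulk operation rather than ~500,000 individual inserts, which is usually where a loop like this loses time.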