Bulk collect with Nested loops
Hi I've a requirement like this
I need to pull request nos from table a(Master table)
For every request no I need to pull request details from table b(Detail Table)
For every request no I need to pull contact details from table c
For every request no I need to pull customer data from table d, and I need to create a flat file with that data, so I'm using utl_file. With the normal query approach the nested loops take a lot of time, so I want to use bulk collect with the dynamic query option:
Sample code
=======
-- Question code (verbatim, with review notes): extract request data for a
-- region into a flat file. The concrete defects are marked with BUG below.
create or replace procedure test(region varchar2) as
type tablea_request_typ is table of varchar2(10);
tablea_data tablea_request_typ;
type tableb_request_typ is table of varchar2(1000);
tableb_data tableb_request_typ;
type tablec_request_typ is table of varchar2(1000);
tablec_data tablec_request_typ;
type tabled_request_typ is table of varchar2(1000);
tabled_data tabled_request_typ;
stmta varchar2(32000);
stmtb varchar2(32000);
stmtc varchar2(32000);
stmtd varchar2(32000);
rcura SYS_REFCURSOR;
rcurb SYS_REFCURSOR;
rcurc SYS_REFCURSOR;
rcurd SYS_REFCURSOR;
begin
-- BUG: broken string quoting ("||'='NE';" is not a valid expression) and
-- ":region" is used as if it were a column. Intended form is
--   'select ... where region = :rgn'   together with   OPEN ... USING region.
stmta:='select request_no from tablea where :region'||'='NE';
stmtb:='select request_no||request_detail1||request_detail2 stringb from table b where :region'||'='NE';
stmtc:='select contact1||contact2||contact3||contact4 stringc from table c where :region'||'='NE';
-- NOTE(review): customer query reads "table c"; presumably table d was meant.
stmtd:='select customer1||customer2||customer3||customer4 stringd from table c where :region'||'='NE';
OPEN rcura for stmta;
LOOP
-- BUG: "request_no" is never declared; the fetch target should be tablea_data.
FETCH rcura BULK COLLECT INTO request_no
LIMIT 1000;
FOR f in 1..request_no.count
LOOP
--Tableb
-- BUG: USING supplies a bind value, but stmtb's text never references a
-- usable placeholder for it.
OPEN rcurb for stmtb USING substr(request_no(f),1,14);
LOOP
-- NOTE(review): the ";" after this FETCH is missing (forum mangling?).
FETCH rcurb BULK COLLECT INTO tableb_data
for i in 1..tableb_data.count
LOOP
-- NOTE(review): pseudo-call; the real API is utl_file.put_line(handle, buf).
utl_file(...,tableb_data(i));
END LOOP;
EXIT WHEN rcurb%NOTFOUND;
END LOOP;
-- Tablec
OPEN rcurc for stmtc USING substr(request_no(f),1,14);
LOOP
-- BUG: fetches rcurb instead of rcurc, so rcurc%NOTFOUND never becomes TRUE
-- and this loop never ends -- the "request nos repeating" symptom reported.
FETCH rcurb BULK COLLECT INTO tablec_data
for i in 1..tablec_data.count
LOOP
utl_file(...,tablec_data(i));
END LOOP;
EXIT WHEN rcurc%NOTFOUND;
END LOOP;
-- Tabled
OPEN rcurd for stmtd USING substr(request_no(f),1,14);
LOOP
FETCH rcurd BULK COLLECT INTO tabled_data
for i in 1..tabled_data.count
LOOP
utl_file(...,tabled_data(i));
END LOOP;
EXIT WHEN rcurd%NOTFOUND;
END LOOP;
END LOOP;
EXIT WHEN rcura%NOTFOUND;
END LOOP;
-- BUG: none of the cursors (rcura..rcurd) is ever CLOSEd.
exception
-- BUG: "when other" should be "when others"; printing SQLERRM and returning
-- normally also hides the failure from the caller -- re-raise after logging.
when other then
dbms_output.put_line(sqlerrm);
end;I 'm using code mentioned above but request nos are repeating as if it's an infinete loop ?for ex if request no is 222 It should run once but here it's running more than once?
How to pass bind parameters say in my case region?
Are there any alternate solutions to run it faster apart from using bulk collect?
Right now I'm using explicit cursor with for loop which is taking lot of time ?so is this better sol?
Thanks,
Mahender
Edited by: BluShadow on 24-Aug-2011 08:52
added {noformat}{noformat} tags. Please read {message:id=9360002} to learn to format your code/data yourself.
Use Parameterized cursor :
CREATE OR REPLACE PROCEDURE test(region varchar2)
AS
-- Writes request detail, contact and customer rows for every request in the
-- given region to a flat file via UTL_FILE, using parameterized static
-- cursors and BULK COLLECT (no dynamic SQL needed).
type tablea_request_typ is table of varchar2(10);
type tableb_request_typ is table of varchar2(1000);
type tablec_request_typ is table of varchar2(1000);
type tabled_request_typ is table of varchar2(1000);
tablea_data tablea_request_typ;
tableb_data tableb_request_typ;
tablec_data tablec_request_typ;
tabled_data tabled_request_typ;
-- Cursor formal parameters may not carry length constraints:
-- "v_region VARCHAR2", not "v_region VARCHAR2(100)".
CURSOR rcura(v_region VARCHAR2)
IS
select request_no from tablea where region = v_region;
CURSOR rcurb(v_input VARCHAR2)
IS
select request_no||request_detail1||request_detail2 stringb from tableb where request_num = v_input;
CURSOR rcurc(v_input VARCHAR2)
IS
select contact1||contact2||contact3||contact4 stringc from tablec where request_num = v_input;
-- NOTE(review): the original read "from table c" here too; customer data
-- presumably lives in tabled -- confirm against the real schema.
CURSOR rcurd(v_input VARCHAR2)
IS
select customer1||customer2||customer3||customer4 stringd from tabled where request_num = v_input;
v_file utl_file.file_type;  -- flat-file handle; directory object is site-specific
BEGIN
v_file := utl_file.fopen('EXTRACT_DIR', 'requests.dat', 'w', 32767);
-- Use the procedure's argument instead of hard-coding 'NE'.
OPEN rcura(region);
LOOP
-- With LIMIT, exit on an empty collection: %NOTFOUND is already TRUE on the
-- last partially-filled batch and testing it before processing drops rows.
FETCH rcura BULK COLLECT INTO tablea_data LIMIT 1000;
EXIT WHEN tablea_data.COUNT = 0;
FOR f in 1..tablea_data.COUNT
LOOP
-- Tableb: a LIMIT-less bulk fetch brings all detail rows in one round trip.
OPEN rcurb(substr(tablea_data(f),1,14));
FETCH rcurb BULK COLLECT INTO tableb_data;
CLOSE rcurb;
FOR i in 1..tableb_data.COUNT
LOOP
utl_file.put_line(v_file, tableb_data(i));
END LOOP;
-- Tablec (the original fetched rcurb here, so rcurc%NOTFOUND never became
-- TRUE and the loop never ended -- the "repeating request nos" symptom).
OPEN rcurc(substr(tablea_data(f),1,14));
FETCH rcurc BULK COLLECT INTO tablec_data;
CLOSE rcurc;
FOR i in 1..tablec_data.COUNT
LOOP
utl_file.put_line(v_file, tablec_data(i));
END LOOP;
-- Tabled
OPEN rcurd(substr(tablea_data(f),1,14));
FETCH rcurd BULK COLLECT INTO tabled_data;
CLOSE rcurd;
FOR i in 1..tabled_data.COUNT
LOOP
utl_file.put_line(v_file, tabled_data(i));
END LOOP;
END LOOP;
END LOOP;
CLOSE rcura;
utl_file.fclose(v_file);
EXCEPTION
WHEN OTHERS THEN
IF utl_file.is_open(v_file) THEN
utl_file.fclose(v_file);
END IF;
dbms_output.put_line(dbms_utility.format_error_backtrace);
-- Re-raise so the caller sees the failure instead of a silent success.
RAISE;
END;
/
Hope this helps. If not, post your table structures...
Similar Messages
-
Error using BULK Collect with RECORD TYPE
hello
I have written a simple Procedure by declaring a record type & then making a variable of NESTED Table type.
I then select data using BULK COLLECT & tryin to access it through a LOOP.....Getting an ERROR.
-- Question code (verbatim): BULK COLLECT into a RECORD-based nested table.
-- The causes of the quoted compile errors are marked below.
CREATE OR REPLACE PROCEDURE sp_test_bulkcollect
IS
TYPE rec_type IS RECORD (
emp_id VARCHAR2(20),
level_id NUMBER
-- BUG: the record declaration is never closed -- a ");" is missing here.
TYPE v_rec_type IS TABLE OF rec_type;
-- BUG: no variable of type v_rec_type is declared (e.g. "v v_rec_type;").
BEGIN
SELECT employee_id, level_id
-- BUG: the INTO target is the TYPE name, not a variable -- hence PLS-00321.
BULK COLLECT INTO v_rec_type
FROM portfolio_exec_level_mapping
WHERE portfolio_execp_id = 2851852;
-- BUG: iterating a type, not a variable -- hence PLS-00302 on FIRST.
FOR indx IN v_rec_type.FIRST..v_rec_type.LAST
LOOP
-- BUG: element access is backwards -- should be v(indx).emp_id, not
-- v_rec_type.emp_id(indx).
dbms_output.put_line('Emp -- '||v_rec_type.emp_id(indx)||' '||v_rec_type.level_id(indx));
END LOOP;
END;
Below are the ERROR's i am getting ....
- Compilation errors for PROCEDURE DOMRATBDTESTUSER.SP_TEST_BULKCOLLECT
Error: PLS-00321: expression 'V_REC_TYPE' is inappropriate as the left hand side of an assignment statement
Line: 15
Text: FROM portfolio_exec_level_mapping
Error: PL/SQL: ORA-00904: : invalid identifier
Line: 16
Text: WHERE portfolio_execp_id = 2851852;
Error: PL/SQL: SQL Statement ignored
Line: 14
Text: BULK COLLECT INTO v_rec_type
Error: PLS-00302: component 'FIRST' must be declared
Line: 19
Text: LOOP
Error: PL/SQL: Statement ignored
Line: 19
Text: LOOP
PLZ Help.and with a full code sample:
SQL> CREATE OR REPLACE PROCEDURE sp_test_bulkcollect
2 IS
3 TYPE rec_type IS RECORD (
4 emp_id VARCHAR2(20),
5 level_id NUMBER
6 );
7 TYPE v_rec_type IS TABLE OF rec_type;
8 v v_rec_type;
9 BEGIN
10 SELECT empno, sal
11 BULK COLLECT INTO v
12 FROM emp
13 WHERE empno = 7876;
14 FOR indx IN v.FIRST..v.LAST
15 LOOP
16 dbms_output.put_line('Emp -- '||v(indx).emp_id||' '||v(indx).level_id);
17 END LOOP;
18 END;
19 /
Procedure created.
SQL>
SQL> show error
No errors.
SQL>
SQL> begin
2 sp_test_bulkcollect;
3 end;
4 /
Emp -- 7876 1100
PL/SQL procedure successfully completed. -
Is there a way to BULK COLLECT with FOR UPDATE and not lock ALL the rows?
Currently, we fetch a cursor on a few million rows using BULK COLLECT.
In a FORALL loop, we update the rows.
What is happening now, is that we run this procedure at the same time, and there is another session running a MERGE statement on the same table, and a DEADLOCK is created between them.
I'd like to add to the cursor the FOR UPDATE clause, but from what i've read,
it seems that this will cause ALL the rows in the cursor to become locked.
This is a problem, as the other session is running MERGE statements on the table every few seconds, and I don't want it to fail with ORA-0054 (resource busy).
What I would like to know is if there is a way, that only the rows in the
current bulk will be locked, and all the other rows will be free for updates.
To reproduce this problem:
1. Create test table:
-- Test fixture for the locking demonstration. The forum formatting stripped
-- the parentheses from the original post; restored here.
create table TEST_TAB
(
  ID1           VARCHAR2(20),
  ID2           VARCHAR2(30),
  LAST_MODIFIED DATE
);
2. Add rows to test table:
insert into TEST_TAB (ID1, ID2, LAST_MODIFIED)
values ('416208000770698', '336015000385349', to_date('15-11-2009 07:14:56', 'dd-mm-yyyy hh24:mi:ss'));
insert into TEST_TAB (ID1, ID2, LAST_MODIFIED)
values ('208104922058401', '336015000385349', to_date('15-11-2009 07:11:15', 'dd-mm-yyyy hh24:mi:ss'));
insert into TEST_TAB (ID1, ID2, LAST_MODIFIED)
values ('208104000385349', '336015000385349', to_date('15-11-2009 07:15:13', 'dd-mm-yyyy hh24:mi:ss'));
3. Create test procedure:
-- Demonstrates the locking question: FOR UPDATE locks EVERY row the cursor
-- selects as soon as the cursor is opened, not just the current batch.
CREATE OR REPLACE PROCEDURE TEST_PROC IS
TYPE id1_typ is table of TEST_TAB.ID1%TYPE;
TYPE id2_typ is table of TEST_TAB.ID2%TYPE;
id1_arr id1_typ;
id2_arr id2_typ;
CURSOR My_Crs IS
SELECT ID1, ID2
FROM TEST_TAB
WHERE ID2 = '336015000385349'
FOR UPDATE;
BEGIN
-- All matching rows are locked here, at OPEN time.
OPEN My_Crs;
LOOP
-- LIMIT 1 processes one row per iteration, which makes the lock window easy
-- to observe but defeats the purpose of bulk fetching in real code.
FETCH My_Crs bulk collect
INTO id1_arr, id2_arr LIMIT 1;
Forall i in 1 .. id1_arr.COUNT
UPDATE TEST_TAB
SET LAST_MODIFIED = SYSDATE
where ID2 = id2_arr(i)
and ID1 = id1_arr(i);
-- The sleep keeps the transaction (and therefore the locks) open so another
-- session can probe them with check_record_locked.
dbms_lock.sleep(15);
EXIT WHEN My_Crs%NOTFOUND;
END LOOP;
CLOSE My_Crs;
COMMIT;
EXCEPTION
WHEN OTHERS THEN
RAISE_APPLICATION_ERROR(-20000,
'Test Update ' || SQLCODE || ' ' || SQLERRM);
END TEST_PROC;
4. Create another procedure to check if table rows are locked:
CREATE OR REPLACE PROCEDURE check_record_locked(p_id IN TEST_TAB.ID1%TYPE) IS
    -- Probe cursor: attempts to lock the single matching row without waiting.
    CURSOR c_probe IS
        SELECT 'dummy'
          FROM TEST_TAB
         WHERE ID2 = '336015000385349'
           AND ID1 = p_id
           FOR UPDATE NOWAIT;
    row_is_locked EXCEPTION;
    PRAGMA EXCEPTION_INIT(row_is_locked, -54);  -- ORA-00054: resource busy
BEGIN
    -- Opening the cursor succeeds only if no other session holds the row lock.
    OPEN c_probe;
    CLOSE c_probe;
    dbms_output.put_line('Record ' || to_char(p_id) || ' is not locked.');
    -- Release the probe lock immediately.
    rollback;
EXCEPTION
    WHEN row_is_locked THEN
        dbms_output.put_line('Record ' || to_char(p_id) || ' is locked.');
END check_record_locked;
5. in one session, run the procedure TEST_PROC.
6. While it's running, in another session, run this block:
begin
check_record_locked('208104922058401');
check_record_locked('416208000770698');
check_record_locked('208104000385349');
end;
7. you will see that all records are identified as locked.
Is there a way that only 1 row will be locked, and the other 2 will be unlocked?
Thanks,
Yoni.
I don't have database access on weekends (look at it as a template)
suppose you
create table help_iot
(bucket number,
id1 varchar2(20),
constraint help_iot_pk primary key (bucket,id1)
organization index;not very sure about the create table syntax above.
-- NOTE(review): forum template, posted untested by its author ("not very sure
-- about the syntax"); the problems marked below must be fixed before it can
-- compile. The idea: lock and update one ntile bucket of rows at a time.
declare
maximal_bucket number := 10000; -- will update few hundred rows at a time if you must update few million rows
the_sysdate date := sysdate;
begin
-- BUG: TRUNCATE is DDL and cannot appear directly in PL/SQL; use
-- EXECUTE IMMEDIATE 'truncate table help_iot';
truncate table help_iot;
insert into help_iot
select ntile(maximal_bucket) over (order by id1) bucket,id1
from test_tab
where id2 = '336015000385349';
for i in 1 .. maximal_bucket
loop
-- BUG: a SELECT inside PL/SQL needs an INTO clause (or a cursor); this
-- statement exists only to take the FOR UPDATE locks for bucket i.
select id1,id2,last_modified
from test_tab
where id2 = '336015000385349'
and id1 in (select id1
from help_iot
where bucket = i
-- BUG: the closing ")" of the IN subquery is missing (here and again below).
for update of last_modified;
update test_tab
set last_modified = the_sysdate
where id2 = '336015000385349'
and id1 in (select id1
from help_iot
where bucket = i
commit;
dbms_lock.sleep(15);
end loop;
end;Regards
Etbin
introduced the_sysdate if last_modified must be the same for all updated rows
Edited by: Etbin on 29.11.2009 16:48 -
Doubt about Bulk Collect with LIMIT
Hi
I have a Doubt about Bulk collect , When is done Commit
I Get a example in PSOUG
http://psoug.org/reference/array_processing.html
-- Empty structural copy of SERVERS: "WHERE 1=2" copies columns but no rows.
CREATE TABLE servers2 AS
SELECT *
FROM servers
WHERE 1=2;
DECLARE
CURSOR s_cur IS
SELECT *
FROM servers;
TYPE fetch_array IS TABLE OF s_cur%ROWTYPE;
s_array fetch_array;
BEGIN
OPEN s_cur;
LOOP
-- Each iteration moves at most 1000 rows from the cursor into s_array.
FETCH s_cur BULK COLLECT INTO s_array LIMIT 1000;
FORALL i IN 1..s_array.COUNT
INSERT INTO servers2 VALUES s_array(i);
-- %NOTFOUND is tested only AFTER the batch is inserted, so the final
-- partially-filled batch is not lost.
EXIT WHEN s_cur%NOTFOUND;
END LOOP;
CLOSE s_cur;
-- Single COMMIT after the loop: every inserted row belongs to one
-- transaction, no matter how many LIMIT-sized batches were processed.
COMMIT;
END;If my table Servers have 3 000 000 records , when is done commit ? when insert all records ?
could crash redo log ?
using 9.2.08muttleychess wrote:
If my table Servers have 3 000 000 records , when is done commit ? Commit point has nothing to do with how many rows you process. It is purely business driven. Your code implements some business transaction, right? So if you commit before whole trancaction (from business standpoint) is complete other sessions will already see changes that are (from business standpoint) incomplete. Also, what if rest of trancaction (from business standpoint) fails?
SY. -
Problem with BULK COLLECT with million rows - Oracle 9.0.1.4
We have a requirement where are supposed to load 58 millions of rows into a FACT Table in our DATA WAREHOUSE. We initially planned to use Oracle Warehouse Builder but due to performance reasons, decided to write custom code. We wrote a custome procedure which opens a simple cursor and reads all the 58 million rows from the SOURCE Table and in a loop processes the rows and inserts the records into a TARGET Table. The logic works fine but it took 20hrs to complete the load.
We then tried to leverage the BULK COLLECT and FORALL and PARALLEL options and modified our PL/SQL code completely to reflect these. Our code looks very simple.
1. We declared PL/SQL BINARY_INDEXed Tables to store the data in memory.
2. We used BULK COLLECT into FETCH the data.
3. We used FORALL statement while inserting the data.
We did not introduce any of our transformation logic yet.
We tried with the 600,000 records first and it completed in 1 min and 29 sec with no problems. We then doubled the no. of rows to 1.2 million and the program crashed with the following error:
ERROR at line 1:
ORA-04030: out of process memory when trying to allocate 16408 bytes (koh-kghu
call ,pmucalm coll)
ORA-06512: at "VVA.BULKLOAD", line 66
ORA-06512: at line 1
We got the same error even with 1 million rows.
We do have the following configuration:
SGA - 8.2 GB
PGA
- Aggregate Target - 3GB
- Current Allocated - 439444KB (439 MB)
- Maximum allocated - 2695753 KB (2.6 GB)
Temp Table Space - 60.9 GB (Total)
- 20 GB (Available approximately)
I think we do have more than enough memory to process the 1 million rows!!
Also, some times the same program results in the following error:
SQL> exec bulkload
BEGIN bulkload; END;
ERROR at line 1:
ORA-03113: end-of-file on communication channel
We did not even attempt the full load. Also, we are not using the PARALLEL option yet.
Are we hitting any bug here? Or PL/SQL is not capable of mass loads? I would appreciate any thoughts on this?
Thanks,
Haranadh
Following is the code:
set echo off
set timing on
-- Loads rows from IMA_AMA_ACCT into IMA_DLY_ACCT with BULK COLLECT + FORALL.
-- NOTE(review): root cause of the reported ORA-04030 -- the single FETCH
-- below has no LIMIT clause, so the ENTIRE result set (tens of millions of
-- rows, one collection per column) is materialized in PGA memory at once.
-- Fetch inside a loop with LIMIT (e.g. 10000) and run the FORALL once per
-- batch instead.
create or replace procedure bulkload as
-- SOURCE --
TYPE src_cpd_dt IS TABLE OF ima_ama_acct.cpd_dt%TYPE;
TYPE src_acqr_ctry_cd IS TABLE OF ima_ama_acct.acqr_ctry_cd%TYPE;
TYPE src_acqr_pcr_ctry_cd IS TABLE OF ima_ama_acct.acqr_pcr_ctry_cd%TYPE;
TYPE src_issr_bin IS TABLE OF ima_ama_acct.issr_bin%TYPE;
TYPE src_mrch_locn_ref_id IS TABLE OF ima_ama_acct.mrch_locn_ref_id%TYPE;
TYPE src_ntwrk_id IS TABLE OF ima_ama_acct.ntwrk_id%TYPE;
TYPE src_stip_advc_cd IS TABLE OF ima_ama_acct.stip_advc_cd%TYPE;
TYPE src_authn_resp_cd IS TABLE OF ima_ama_acct.authn_resp_cd%TYPE;
TYPE src_authn_actvy_cd IS TABLE OF ima_ama_acct.authn_actvy_cd%TYPE;
TYPE src_resp_tm_id IS TABLE OF ima_ama_acct.resp_tm_id%TYPE;
TYPE src_mrch_ref_id IS TABLE OF ima_ama_acct.mrch_ref_id%TYPE;
TYPE src_issr_pcr IS TABLE OF ima_ama_acct.issr_pcr%TYPE;
TYPE src_issr_ctry_cd IS TABLE OF ima_ama_acct.issr_ctry_cd%TYPE;
TYPE src_acct_num IS TABLE OF ima_ama_acct.acct_num%TYPE;
TYPE src_tran_cnt IS TABLE OF ima_ama_acct.tran_cnt%TYPE;
TYPE src_usd_tran_amt IS TABLE OF ima_ama_acct.usd_tran_amt%TYPE;
src_cpd_dt_array src_cpd_dt;
src_acqr_ctry_cd_array src_acqr_ctry_cd;
src_acqr_pcr_ctry_cd_array src_acqr_pcr_ctry_cd;
src_issr_bin_array src_issr_bin;
src_mrch_locn_ref_id_array src_mrch_locn_ref_id;
src_ntwrk_id_array src_ntwrk_id;
src_stip_advc_cd_array src_stip_advc_cd;
src_authn_resp_cd_array src_authn_resp_cd;
src_authn_actvy_cd_array src_authn_actvy_cd;
src_resp_tm_id_array src_resp_tm_id;
src_mrch_ref_id_array src_mrch_ref_id;
src_issr_pcr_array src_issr_pcr;
src_issr_ctry_cd_array src_issr_ctry_cd;
src_acct_num_array src_acct_num;
src_tran_cnt_array src_tran_cnt;
src_usd_tran_amt_array src_usd_tran_amt;
j number := 1;
CURSOR c1 IS
SELECT
cpd_dt,
acqr_ctry_cd ,
acqr_pcr_ctry_cd,
issr_bin,
mrch_locn_ref_id,
ntwrk_id,
stip_advc_cd,
authn_resp_cd,
authn_actvy_cd,
resp_tm_id,
mrch_ref_id,
issr_pcr,
issr_ctry_cd,
acct_num,
tran_cnt,
usd_tran_amt
FROM ima_ama_acct ima_ama_acct
-- NOTE(review): ORDER BY forces a full sort of the source table before the
-- first row is returned; drop it unless batch ordering truly matters.
ORDER BY issr_bin;
BEGIN
OPEN c1;
-- BUG: no LIMIT -- this one fetch tries to hold every source row in memory
-- simultaneously (see the ORA-04030 note at the top).
FETCH c1 bulk collect into
src_cpd_dt_array ,
src_acqr_ctry_cd_array ,
src_acqr_pcr_ctry_cd_array,
src_issr_bin_array ,
src_mrch_locn_ref_id_array,
src_ntwrk_id_array ,
src_stip_advc_cd_array ,
src_authn_resp_cd_array ,
src_authn_actvy_cd_array ,
src_resp_tm_id_array ,
src_mrch_ref_id_array ,
src_issr_pcr_array ,
src_issr_ctry_cd_array ,
src_acct_num_array ,
src_tran_cnt_array ,
src_usd_tran_amt_array ;
CLOSE C1;
FORALL j in 1 .. src_cpd_dt_array.count
INSERT INTO ima_dly_acct (
CPD_DT,
ACQR_CTRY_CD,
ACQR_TIER_CD,
ACQR_PCR_CTRY_CD,
ACQR_PCR_TIER_CD,
ISSR_BIN,
OWNR_BUS_ID,
USER_BUS_ID,
MRCH_LOCN_REF_ID,
NTWRK_ID,
STIP_ADVC_CD,
AUTHN_RESP_CD,
AUTHN_ACTVY_CD,
RESP_TM_ID,
PROD_REF_ID,
MRCH_REF_ID,
ISSR_PCR,
ISSR_CTRY_CD,
ACCT_NUM,
TRAN_CNT,
USD_TRAN_AMT)
VALUES (
src_cpd_dt_array(j),
src_acqr_ctry_cd_array(j),
null,
src_acqr_pcr_ctry_cd_array(j),
null,
src_issr_bin_array(j),
null,
null,
src_mrch_locn_ref_id_array(j),
src_ntwrk_id_array(j),
src_stip_advc_cd_array(j),
src_authn_resp_cd_array(j),
src_authn_actvy_cd_array(j),
src_resp_tm_id_array(j),
null,
src_mrch_ref_id_array(j),
src_issr_pcr_array(j),
src_issr_ctry_cd_array(j),
src_acct_num_array(j),
src_tran_cnt_array(j),
src_usd_tran_amt_array(j));
COMMIT;
END bulkload;
SHOW ERRORS
-----------------------------------------------------------------------------do you have a unique key available in the rows you are fetching?
It seems a cursor with 20 million rows that is as wide as all the columnsyou want to work with is a lot of memory for the server to use at once. You may be able to do this with parallel processing (dop over 8) and a lot of memory for the warehouse box (and the box you are extracting data from)...but is this the most efficient (and thereby fastest) way to do it?
What if you used a cursor to select a unique key only, and then during the cursor loop fetch each record, transform it, and insert it into the target?
Its a different way to do a lot at once, but it cuts down on the overall memory overhead for the process.
I know this isnt as elegant as a single insert to do it all at once, but sometimes trimming a process down so it takes less resources at any given moment is much faster than trying to do the whole thing at once.
My solution is probably biased by transaction systems, so I would be interested in what the data warehouse community thinks of this.
For example:
source table my_transactions (tx_seq_id number, tx_fact1 varchar2(10), tx_fact2 varchar2(20), tx_fact3 number, ...)
select a cursor of tx_seq_id only (even at 20 million rows this is not much)
you could then either use a for loop or even bulk collect into a plsql collection or table
then process individually like this:
procedure process_a_tx(p_tx_seq_id in number)
is
rTX my_transactions%rowtype;
begin
select * into rTX from my_transactions where tx_seq_id = p_tx_seq_id;
--modify values as needed
insert into my_target(a, b, c) values (rtx.fact_1, rtx.fact2, rtx.fact3);
commit;
exception
when others
rollback;
--write to a log or raise and exception
end process_a_tx;
procedure collect_tx
is
cursor tx is
select tx_seq_id from my_transactions;
begin
for rTx in cTx loop
process_a_tx(rtx.tx_seq_id);
end loop;
end collect_tx; -
How to handle the bad record while using bulk collect with limit.
Hi
How to handle the Bad record as part of the insertion/updation to avoid the transaction.
Example:
I am inserting into table with LIMIT of 1000 records and i've got error at 588th record.
i want to commit the transaction with 588 inserted record in table and log the error into
error logging table then i've to continue with transaction with 560th record.
Can anyone suggest me in this case.
Regards,
yuva>
How to handle the Bad record as part of the insertion/updation to avoid the transaction.
>
Use the SAVE EXCEPTIONS clause of the FORALL if you are doing bulk inserts.
See SAVE EXCEPTIONS in the PL/SQL Language doc
http://docs.oracle.com/cd/B28359_01/appdev.111/b28370/tuning.htm
And then see Example 12-9 Bulk Operation that continues despite exceptions
>
Example 12-9 Bulk Operation that Continues Despite Exceptions
-- Temporary table for this example:
CREATE TABLE emp_temp AS SELECT * FROM employees;
DECLARE
TYPE empid_tab IS TABLE OF employees.employee_id%TYPE;
emp_sr empid_tab;
-- Exception handler for ORA-24381:
errors NUMBER;
dml_errors EXCEPTION;
PRAGMA EXCEPTION_INIT(dml_errors, -24381);
BEGIN
SELECT employee_id
BULK COLLECT INTO emp_sr FROM emp_temp
WHERE hire_date < '30-DEC-94';
-- Add '_SR' to job_id of most senior employees:
FORALL i IN emp_sr.FIRST..emp_sr.LAST SAVE EXCEPTIONS
UPDATE emp_temp SET job_id = job_id || '_SR'
WHERE emp_sr(i) = emp_temp.employee_id;
-- If errors occurred during FORALL SAVE EXCEPTIONS,
-- a single exception is raised when the statement completes.
EXCEPTION
-- Figure out what failed and why
WHEN dml_errors THEN
errors := SQL%BULK_EXCEPTIONS.COUNT;
DBMS_OUTPUT.PUT_LINE
('Number of statements that failed: ' || errors);
FOR i IN 1..errors LOOP
DBMS_OUTPUT.PUT_LINE('Error #' || i || ' occurred during '||
'iteration #' || SQL%BULK_EXCEPTIONS(i).ERROR_INDEX);
DBMS_OUTPUT.PUT_LINE('Error message is ' ||
SQLERRM(-SQL%BULK_EXCEPTIONS(i).ERROR_CODE));
END LOOP;
END;
DROP TABLE emp_temp; -
Error in bulk collect into nested table
I keep getting an error while trying to compile this line:
fetch c_juros bulk collect into wrk_juros_plano(p_ind_segreg);
LINE/COL ERROR
0/0 PLS-00801: internal error [74306]
When I put that single line in comments it compiles fine, but of course it then doesn't do what I want.
The data structure i use is as follows:
cursor c_juros(p_ind_segreg in varchar2) is
select (((power(1 + (i.prc_juros_atuari_ano / 100), 1 / 12) - 1) * 100) / 100) prc_juros_efetiv_mes,
i.dat_inic_vigenc,
(nvl(i.dat_fim_vigenc, sysdate) + 1) dat_fim_vigenc,
i.ind_segreg
from v_indexador_taxa_atuarial i
where i.ind_segreg = p_ind_segreg
order by i.dat_inic_vigenc;
type t_juros_plano is table of c_juros%rowtype;
type t_tab_juros_plano is table of t_juros_plano index by binary_integer;
wrk_juros_plano t_tab_juros_plano;the code goes like this:
begin
if not(wrk_juros_plano.exists(p_ind_segreg))
then
if c_juros%isopen
then
close c_juros;
end if;
open c_juros(p_ind_segreg);
wrk_juros_plano(p_ind_segreg) := t_juros_plano();
fetch c_juros bulk collect into wrk_juros_plano(p_ind_segreg);
end if;
...p_ind_segreg is my input parameter, that should be the index of the array.
The purpose is to create the parameter indexed element if it doesn't already exist, fetching it
from the cursor, that defines a nested table.
I tried removing the initialization line to no effect, among other things.Ok, I just found out a way around it. It works, but that error is probably a bug, cause workarounds are not really cute.
I declared a nested table compatible with the element from the associative array:
wrk_juros t_juros_plano;and chaged that line that was causing the error
fetch c_juros bulk collect into wrk_juros_plano(p_ind_segreg);for
fetch c_juros bulk collect into wrk_juros;
wrk_juros_plano(p_ind_segreg) := wrk_juros;Awesome =\ -
Hi all, I have this annoying problem and I am looking for any suggestions.
I have 2 select boxes. One is for all available users, and second - for selected users (designated as admins). The list of all users is available in a collection (2 properties userId and userName displayed in the code below). The list of admins contains only userId (as strings).
I have no problem with populating selected users (admins) list, reusing pretty much the same logic below, but I cannot find a way to break out from the nested loop once the match is found, to avoid repetitions leading to incorrect display of results.
<%-- Question code (verbatim). Note both EL expressions are missing their
     closing "}" -- test="${user.userId!=adminId" should end "...adminId}". --%>
<select name=available>
<c:forEach items="${users}" var="user" varStatus="outer">
<c:forEach items="${adminIds}" var="adminId" varStatus="inner">
<%-- BUG: with "!=" the option is emitted once for EVERY admin id that does
     not match, so each non-admin user repeats adminIds.size() times. JSTL
     has no "break": set a flag in the inner loop and emit the option after
     it (or precompute membership) instead. --%>
<c:if test="${user.userId!=adminId">
<option value="<c:out value="${user.userId}" />"><c:out value="${user.userFullName}"/></option>
</c:if>
</c:forEach>
</c:forEach>
</select>
<select name=selected>
<c:forEach items="${users}" var="user" varStatus="outer">
<c:forEach items="${adminIds}" var="adminId" varStatus="inner">
<%-- The "==" branch happens to work because each user matches at most one
     admin id, so no duplicates are produced here. --%>
<c:if test="${user.userId==adminId">
<option value="<c:out value="${user.userId}" />"><c:out value="${user.userFullName}"/></option>
</c:if>
</c:forEach>
</c:forEach>
</select>Can anyone help, please? I am also restricted to JSP 1.2Double post: http://forum.java.sun.com/thread.jspa?threadID=707950&tstart=0
-
How to improve performance using bulk collects with plsql tables or arrays
Hi All,
my procedure is like this
declare
cursor c1 is select ----------------------
begin
assigning to variables
validations on that variables
--50 validations are here --
insert into a table
end;
we have created indexes on primary keys,
i want to use
DECLARE
CURSOR a_cur IS
SELECT program_id
FROM airplanes;
TYPE myarray IS TABLE OF a_cur%ROWTYPE;
cur_array myarray;
BEGIN
OPEN a_cur;
LOOP
FETCH a_cur BULK COLLECT INTO cur_array LIMIT 100;
***---------can i assign cursor data to the plsql table variables or array***
***validate on the pl sql variable as---***
i
nsert into a table
EXIT WHEN a_cur%NOTFOUND;
END LOOP;
CLOSE a_cur;
END;
Edited by: Veekay on Oct 21, 2011 4:28 AMFastest way often is this:
insert /*+append */
into aTable
select * from airplanes;
commit;The select and insert part can even be done in parallel if needed.
However if the oparation is complex or the dataset is very very very very very large or the programmer is decent but not excellent then the bulk approach should be considered. It is often a pretty stable and linear scaling approach.
The solution depends a little on the database version.
LOOP
FETCH a_cur BULK COLLECT INTO cur_array LIMIT 100;
-- With LIMIT, exit on an empty collection, not on %NOTFOUND.
-- BUG FIX: the original tested a_cur.count / a_cur.first / a_cur(i) --
-- those attributes belong to the COLLECTION (cur_array), not the cursor.
EXIT WHEN cur_array.COUNT = 0;
FORALL i IN cur_array.FIRST .. cur_array.LAST
insert into aTable (id)
-- cur_array holds a_cur%ROWTYPE records, so name the field explicitly.
values (cur_array(i).program_id);
END LOOP;
...If you have more then one column then you might need a single collection for each column. Other possibilities depend on the db version.
Also: do not exit using a_cur%NOTFOUND. This is wrong! You might loose records from the end of the data set. -
Problem with Nested loop in Fox-Formula
Dear Experts,
Let s share the scenario :
MaterialGroups with following Keys for Example AAAA, BBBB, CCCC..., Materialgroups are selected in the 1st input ready query, which is assigned to DataProvider DP_1 in a webtemplate.
every Materialgroup has several Materials, for instance:
Materialgroup AAAA has following Materials: AAAA10, AAAA11, AAAA12, AAAA13...
Materials are selected in a second input ready Query, which is assigned to a second DataProvider DP_2 in the Same Webtemplate as the query 1.
Both Materialgroup and Material are based on the same Aggreagtion level and same real time cube.
I want to copy the input values for every MaterialGroup ( 1st query, DP_1) only to it s own Materials (2cond Query, DP_2).
To resolve this Issue i wrote the following Fox Formula code with a nested loop, however it does not work properly. when I m debugging the code, i could release that the second Loop was ignored.but wehn i replace the second loop (nested loop) with a fixed value it s seems to work properly
DATA MG1 TYPE MATG.<------ MaterialGroup
DATA MT1 TYPE MAT.<----
Material
DATA S1 TYPE STRING.
DATA S2 TYPE STRING.
DATA S3 TYPE STRING
DATA K TYPE F.
DATA Z TYPE F.
FOREACH MG1.
To check Materialgroup in debugger
S1= MG1.
BREAK-POINT.
K = {KEYfIG, #, MG1}.
BREAK-POINT.
FOREACH MT1. <----- if i set MT1 to a fixed value like AAAA11 then S3 get the wished value namely AAAA
S2 = MT1.
BREAK-POINT.
S3 = SUBSTR (MT1, 0, 4).
BREAK-POINT.
IF S1 = S3.
{KEYFIG, MT1, #} = K.
following Statement is only used To check in debugger if Material has become the same Materialgroup value
Z = {KEYFIG, MT1, #}.
BREAK-POINT.
ENDIF.
ENDFOR.
ENDFOR.
Thakns for any help
Frank
Edited by: FRYYYBM on Mar 17, 2011 10:54 PM
Edited by: FRYYYBM on Mar 17, 2011 11:06 PMHi,
Please try this way.
DATA MG1 TYPE MATG.<------ MaterialGroup
DATA MT1 TYPE MAT.<----
Material
DATA S1 TYPE STRING.
DATA S2 TYPE STRING.
DATA S3 TYPE STRING
DATA K TYPE F.
DATA Z TYPE F.
FOREACH MT1.
FOREACH MG1.
To check Materialgroup in debugger
S1= MG1.
BREAK-POINT.
K = {KEYfIG, #, MG1}.
BREAK-POINT.
FOREACH MT1. <----- if i set MT1 to a fixed value like AAAA11 then S3 get the wished value namely AAAA
S2 = MT1.
BREAK-POINT.
S3 = SUBSTR (MT1, 0, 4).
BREAK-POINT.
IF S1 = S3.
{KEYFIG, MT1, #} = K.
following Statement is only used To check in debugger if Material has become the same Materialgroup value
Z = {KEYFIG, MT1, #}.
BREAK-POINT.
ENDIF.
ENDFOR.
ENDFOR.
ENDFOR.
Thanks.
With regards,
Anand Kumar -
Using bulk collect with select
i am working on oracle 10g release 2 .
My requirement is like this
1 declare
2 type id_type is table of fnd_menus.menu_id%type;
3 id_t id_type;
4 cursor cur_menu is select menu_name from menu;
5 type name_type is table of menu.menu_name%type;
6 name_t name_type;
7 begin
8 open cur_menu;
9 fetch cur_menu bulk collect into name_t;
10 forall i in name_t.first..name_t.last
11 select menu_id into id_t(i) from fnd_menus where menu_name = name_t(i);
12* end;
SQL> /
select menu_id into id_t(i) from fnd_menus where menu_name = name_t(i);
ERROR at line 11:
ORA-06550: line 11, column 23:
PLS-00437: FORALL bulk index cannot be used in INTO clause
ORA-06550: line 11, column 31:
PL/SQL: ORA-00904: : invalid identifier
ORA-06550: line 11, column 3:
PL/SQL: SQL Statement ignoredSo how i can bulk select into a table the rows that satisfy a particular condition ?A forall statement is used bulk execute DML, as can be read [url http://download.oracle.com/docs/cd/B19306_01/appdev.102/b14261/forall_statement.htm#LNPLS01321]here in the documentation.
I guess you want something like this:
SQL> create table menu
2 as
3 select 'A' menu_name from dual union all
4 select 'B' from dual union all
5 select 'C' from dual
6 /
Tabel is aangemaakt.
SQL> create table fnd_menus
2 as
3 select 10 menu_id, 'A' menu_name from dual union all
4 select 9, 'B' from dual union all
5 select 8, 'C' from dual union all
6 select 7, 'D' from dual
7 /
Tabel is aangemaakt.
SQL> declare
2 type id_type is table of fnd_menus.menu_id%type;
3 id_t id_type;
4 cursor cur_menu is select menu_name from menu;
5 type name_type is table of menu.menu_name%type;
6 name_t name_type;
7 begin
8 open cur_menu;
9 fetch cur_menu bulk collect into name_t;
10 forall i in name_t.first..name_t.last
11 select menu_id into id_t(i) from fnd_menus where menu_name = name_t(i);
12 end;
13 /
select menu_id into id_t(i) from fnd_menus where menu_name = name_t(i);
FOUT in regel 11:
.ORA-06550: Regel 11, kolom 25:
PLS-00437: FORALL-bulkindex kan niet worden gebruikt in INTO-clausule..
ORA-06550: Regel 11, kolom 33:
PL/SQL: ORA-00904: : ongeldige ID.
ORA-06550: Regel 11, kolom 5:
PL/SQL: SQL Statement ignored.
SQL> declare
2 type id_type is table of fnd_menus.menu_id%type;
3 id_t id_type;
4 begin
5 select menu_id
6 bulk collect into id_t
7 from fnd_menus
8 where menu_name in (select menu_name from menu)
9 ;
10 for i in 1..id_t.count
11 loop
12 dbms_output.put_line(id_t(i));
13 end loop
14 ;
15 end;
16 /
10
9
8
PL/SQL-procedure is geslaagd.Regards,
Rob. -
Bulk Collect with FORALL not working - Not enough values error
Hi,
I am trying to copy data from one table to another which are having different number of columns. I am doing the following. But it threw not enough values error.
Table A has more than 10 millions of records. So I am using bulk collect instead of using insert into select from.
TABLE A (has more columns - like 25)
c1 Number
c2 number
c3 varchar2
c4 varchar2
c25 varchar2
TABLE B (has less columns - like 7)
c1 Number
c2 number
c3 varchar2
c4 varchar2
c5 number
c7 date
c10 varchar2
-- Question code (verbatim): copy 7 columns from A into B via a ref cursor.
-- NOTE(review): dynamic SQL is unnecessary here; a plain INSERT ... SELECT
-- (or a static cursor) would be simpler and faster.
declare
TYPE c IS REF CURSOR;
v_c c;
v_Sql VARCHAR2(2000);
TYPE array is table of B%ROWTYPE;
l_data array;
begin
v_Sql := 'SELECT c1, c2, c3, c4, c5, c7, c10 FROM A ORDER BY c1';
OPEN v_c FOR v_Sql;
LOOP
-- BUG: fetches into "ldata", but the collection was declared as "l_data".
-- BUG: there is no EXIT inside this loop -- it never terminates.
FETCH v_c BULK COLLECT INTO ldata LIMIT 100000;
FORALL i in 1 .. ldata.count
INSERT
INTO B
-- ORA-00947 "not enough values": "VALUES ldata(i)" needs a complete
-- B%ROWTYPE record, so the 7 selected columns must match B exactly.
VALUES ldata(i);
END LOOP;
COMMIT;
exception
WHEN OTHERS THEN
ROLLBACK;
dbms_output.put_line('Exception Occurred' || SQLERRM);
END;
When I execute this, I am getting
PL/SQL: ORA-00947: not enough values
Any suggestions please. Thanks in advance.

"Table A has more than 10 million records. So I am using bulk collect instead of using insert into select from." — That doesn't make sense to me. An INSERT ... SELECT is going to be more efficient, more maintainable, easier to write, and easier to understand.
INSERT INTO b( c1, c2, c3, c4, c5, c7, c10 )
SELECT c1, c2, c3, c4, c5, c7, c10
FROM a;

is going to be faster, use fewer resources, be far less error-prone, and have a far more obvious purpose when some maintenance programmer comes along than any PL/SQL block that does the same thing.
If you insist on using PL/SQL, what version of Oracle are you using? You should be able to do something like
DECLARE
TYPE b_tbl IS TABLE OF b%rowtype;
l_array b_tbl;
CURSOR a_cursor
IS SELECT c1, c2, c3, c4, c5, c7, c10 FROM A;
BEGIN
OPEN a_cursor;
LOOP
FETCH a_cursor
BULK COLLECT INTO l_array
LIMIT 10000;
EXIT WHEN l_array.COUNT = 0;
FORALL i IN l_array.FIRST .. l_array.LAST
INSERT INTO b
VALUES l_array(i);
END LOOP;
COMMIT;
END;

That at least eliminates the infinite loop and the unnecessary dynamic SQL. If you're using older versions of Oracle (it's always helpful to post that information up front), the code may need to be a bit more complex.
Justin
Edited by: Justin Cave on Jan 19, 2011 5:46 PM -
Send some example of bulk collect option in loop
Hi
I have three type parameter which is bulk collect from same table
i want to use two of the parameter to verify the data in other table.
and if data won't find using this 3rd bulk collect option to update 3 rd table..
help is appreciated. http://download.oracle.com/docs/cd/E11882_01/appdev.112/e10472/composites.htm
-
Problem with nested loops in collections
Hi All,
I am trying to insert data captured on a form(elements) into a database table thru a view.
I have a process for insert that looks like..
for x in (select * from htmldb_collections where collection_name = 'PARTY')
loop
for i in 1..50
insert into imds_part_num (mdb_nr,part_nr) values(:P5_MDB_NR,to_number(x.c00(i)));
end loop;
end loop;
the error is 'C00' must be declared.
I have no error when i tried to insert with process like this....
for x in (select * from htmldb_collections where collection_name = 'PARTY')
loop
insert into imds_part_num (mdb_nr,part_nr) values(:P5_MDB_NR,to_number(x.c001));
end loop;
i have 50 items on my form page . i added members into the collection like this,
htmldb_collection.add_member(p_collection_name => 'PARTY',
p_c001 => :P5_ENTWICKLUNGSNUMMER,
p_c002 => :P5_GEHAEUSE_PT,
p_c003 => :P5_GEHAEUSE,
p_c004 => :P5_PRESSTEIL,
p_c005 => :P5_MAEL ,
p_c006 => :P5_KGE_WNZ......, before the process mentioned above....
I see the collection as a VARRAY.
Cud somebody point me the mistake, or a workaround for this.
Thank you.

Chalamalasetty,
When you have -
x.c00(i) — you're actually trying to access the Nth item in an array; for example, if i=2 then you're trying to access the 2nd item in the c00 array.
However, your query which performs the select from htmldb_collections is not returning an array called c00, it does infact return distinct columns such as -
jes@DBTEST> desc htmldb_collections;
Name Null? Type
COLLECTION_NAME NOT NULL VARCHAR2(255)
SEQ_ID NOT NULL NUMBER
C001 VARCHAR2(4000)
C002 VARCHAR2(4000)
C003 VARCHAR2(4000)
C004 VARCHAR2(4000)
C005 VARCHAR2(4000)
C006 VARCHAR2(4000)
....... extra lines deleted .......

So you would need to specify the columns individually. If you wished to do this dynamically then you would need to make your insert statement a dynamic statement and build it up as a string.
If you can explain precisely what you're trying to achieve then it will be easier to suggest alternative ways of doing it. -
ORA-06502 during a procedure which uses Bulk collect feature and nested tab
Hello Friends,
I have created one procedure which uses bulk collect and a nested table to hold the bulk data. This procedure was using one cursor and a nested table with the same type as the cursor to hold data fetched from the cursor. The bulk collection technique was used to collect data from the cursor into the nested table. But it is giving an ORA-06502 error.
I reduced code of procedure to following to trace the error point. But still error is comming. Please help us to find the cause and solve it.
Script which is giving error:
declare
v_Errorflag BINARY_INTEGER;
v_flag number := 1;
CURSOR cur_terminal_info Is
SELECT distinct
'a' SettlementType
FROM
dual;
TYPE typ_cur_terminal IS TABLE OF cur_terminal_info%ROWTYPE;
Tab_Terminal_info typ_cur_Terminal;
BEGIN
v_Errorflag := 2;
OPEN cur_terminal_info;
LOOP
v_Errorflag := 4;
FETCH cur_terminal_info BULK COLLECT INTO tab_terminal_info LIMIT 300;
EXIT WHEN cur_terminal_info%rowcount <= 0;
v_Errorflag := 5;
FOR Y IN Tab_Terminal_Info.FIRST..tab_terminal_info.LAST
LOOP
dbms_output.put_line(v_flag);
v_flag := v_flag + 1;
end loop;
END LOOP;
v_Errorflag := 13;
COMMIT;
END;
I have updated the script as follows, changing the datatype to varchar2 for the nested table, but the same error is still coming:
declare
v_Errorflag BINARY_INTEGER;
v_flag number := 1;
CURSOR cur_terminal_info Is
SELECT distinct
'a' SettlementType
FROM
dual;
TYPE typ_cur_terminal IS TABLE OF varchar2(50);
Tab_Terminal_info typ_cur_Terminal;
BEGIN
v_Errorflag := 2;
OPEN cur_terminal_info;
LOOP
v_Errorflag := 4;
FETCH cur_terminal_info BULK COLLECT INTO tab_terminal_info LIMIT 300;
EXIT WHEN cur_terminal_info%rowcount <= 0;
v_Errorflag := 5;
FOR Y IN Tab_Terminal_Info.FIRST..tab_terminal_info.LAST
LOOP
dbms_output.put_line(v_flag);
v_flag := v_flag + 1;
end loop;
END LOOP;
v_Errorflag := 13;
COMMIT;
I could not find the exact reason of error.
Please help us to solve this error.
Thanks and Regards..
Dipali..

Hello Friends,
I got the solution.. :)
I did one mistake in procedure where the loop should end.
I used the statemetn: EXIT WHEN cur_terminal_info%rowcount <= 0;
But it should be: EXIT WHEN Tab_Terminal_Info.COUNT <= 0;
Now my script is working fine.. :)
Thanks and Regards,
Dipali..
Maybe you are looking for
-
Adding a Descendents Query to a Cube
Hi, I have the following MDX query that I wish to add to a cube: with member [measures].[descendents test] as sum Descendants ([Date].[Calendar Year],, LEAVES) * Descendants ([Date].[Calendar Quarter of Year],, LEAVES) * Descendants ([Dat
-
Hp LaserJet M1217 nfw MFP won't allow me to setup wireless...
I am trying to setup a newly purchased, refurbished hp LaserJet M1217 nfw MFP so that I can print wirelessly, but it isn't giving me the option to do it. I started the process trying to follow the directions on a Windows 8 (hiss!!!) machine. The co
-
i need to configure a dial up VPN...i have a 16 port modem network module in my router to terminate dialup connections....i want to have users dial into the router and have a VPN to the network.... i was thinking creating the dial up...once thats est
-
Can I download a kindle version of a book and use it on an IPad, if so, how?
-
Can't access Filter-Render-Lighting Effects
I have an image in Bridge that exists both as a jpg and camera raw. I can open the jpg version and apply Filter - Render - Lighting Effects. When I try to do the same with the camera raw image, I get as far as Render, and then the Lighting Effects op