What is bulk binding....?
Hi
Can anyone please explain what bulk binding is?
thanks in adv.
KK
Q: what is bulk bind?
A: Something that can be looked up in the documentation.
Cheers, APC
Similar Messages
-
ORA-06502: PL/SQL: numeric or value error: Bulk Bind: Truncated Bind
Hi
I am getting the run-time error ORA-06502: PL/SQL: numeric or value error: Bulk Bind: Truncated Bind in my PL/SQL. I have tried everything, such as changing datatypes, but the error still occurs. What can be the cause? Please help.
-- Declaration section of a data-migration script: copies CUSTOMER_NODE_HISTORY
-- rows fetched over a database link into a local staging table (AAA), logging
-- per-row failures into T2_ERROR_TABLE.
declare
-- Scratch variables for service-variant id lookups performed via
-- xxpor_utility.xxpor_getsvid over the remote link.
svid xxpor_utility.p_svid@sppmig1%type;
p_sv_id xxpor_utility.p_svid@sppmig1%type;
tab xxpor_utility.xxpor_indextab@sppmig1;
svid1 xxpor_utility.p_svid@sppmig1%type;
p_sv_id1 xxpor_utility.p_svid@sppmig1%type;
tab1 xxpor_utility.xxpor_indextab@sppmig1;
svid2 xxpor_utility.p_svid@sppmig1%type;
p_sv_id2 xxpor_utility.p_svid@sppmig1%type;
tab2 xxpor_utility.xxpor_indextab@sppmig1;
svid3 xxpor_utility.p_svid@sppmig1%type;
p_sv_id3 xxpor_utility.p_svid@sppmig1%type;
tab3 xxpor_utility.xxpor_indextab@sppmig1;
-- Error-logging scratch variables.
v_index t2_error_table.id_value%type;
v_code t2_error_table.error_code%type;
p_error varchar2(600);
-- k indexes the t2 staging collection; reset inside do_bulk_insert.
k number(20):=0;
v_msg varchar2(2000);
-- Batch size for the BULK COLLECT ... LIMIT fetch, read from
-- t2_system_parameter ('batch_size').
v_commit_count number(10);
-- Migration parameters loaded from t2_system_parameter@sppmig1.
v_at_gpid varchar2(512);
v_at_oper varchar2(512);
v_sch varchar2(512);
v_vat varchar2(512);
-- User-defined exceptions raised when a mandatory translated code is NULL.
exp exception;
exp1 exception;
exp2 exception;
exp3 exception;
exp4 exception;
v_pay varchar2(512);
v_res varchar2(512);
v_digit varchar2(512);
v_agree varchar2(512);
v_driver_licence PERSON_HISTORY.drivers_licence%TYPE;
v_cus_gen1 number(10);
v_cus_gen2 number(10);
v_cus_gen3 number(10);
svid_sr number(10);
v_social PERSON_HISTORY.social_security_number%TYPE;
-- Latest PERSON_HISTORY row (by EFFECTIVE_START_DATE) for a given person.
CURSOR person_cur (p_person_id person_history.person_id%TYPE)
IS
SELECT drivers_licence ,social_security_number
FROM PERSON_HISTORY@SPPMIG1
WHERE PERSON_ID=p_person_id --p2(p).person_id
AND EFFECTIVE_START_DATE = (SELECT MAX(EFFECTIVE_START_DATE)
FROM PERSON_HISTORY@sppmig1
WHERE PERSON_ID=p_person_id);--p2(p).person_id) ;
--p number(20):=1;
--j number(20);
-- Source cursor for the migration.
cursor c1 is
select * from cus_node_his ;
-- Staging collection for rows to be bulk-inserted into AAA.
type temp_c1 is table of customer_node_history%rowtype
index by binary_integer;
t2 temp_c1;
-- NOTE(review): c1 selects from CUS_NODE_HIS but is bulk-collected into a
-- collection typed on CUSTOMER_NODE_HISTORY@slpmig1%ROWTYPE. Any column
-- length/type mismatch between the two is the classic cause of
-- "ORA-06502 ... Bulk Bind: Truncated Bind" — verify the definitions match.
-- Also note the link name here is @slpmig1 while every other reference in
-- this script uses @sppmig1 — confirm this is not a typo.
type temp_c2 is table of customer_node_history@slpmig1%rowtype
index by binary_integer;
p2 temp_c2;
/*cursor c2(p_id customer_query.customer_node_id%type) is
select general_1,general_2,general_3
from customer_query@sppmig1 c where c.customer_query_type_id=10003 and
c.customer_node_id(+) =p_id
and c.open_date = (select
max(open_date) from customer_query@sppmig1 where customer_node_id=p_id
and customer_query_type_id=10003 and c.customer_query_id =(select max(customer_query_id) from customer_query@sppmig1
where customer_node_id=p_id and customer_query_type_id=10003));*/
-- Flushes the staged batch held in t2 into table AAA with a single
-- FORALL insert. Row-level failures are tolerated via SAVE EXCEPTIONS
-- and logged one-by-one into T2_ERROR_TABLE.
-- Side effects: commits; resets the shared batch counter k on success.
procedure do_bulk_insert is
  -- ORA-24381 is raised by FORALL ... SAVE EXCEPTIONS when one or more
  -- individual rows fail; details are then in SQL%BULK_EXCEPTIONS.
  bulk_errors EXCEPTION;
  PRAGMA EXCEPTION_INIT(bulk_errors, -24381);
begin
  forall j in 1..t2.count SAVE EXCEPTIONS
    insert into aaa values t2(j);
  commit;
  --t2.delete;
  k := 0;            -- reset the staging index for the next batch
  v_msg := sqlerrm;  -- snapshot last SQL message (success path)
EXCEPTION
  WHEN bulk_errors THEN
    -- BUG FIX: capture the actual collective error message (ORA-24381)
    -- here; previously v_msg still held a value from an earlier call, so
    -- stale text was logged against the failing rows.
    v_msg := sqlerrm;
    FOR L IN 1..SQL%bulk_exceptions.count
    LOOP
      v_index := SQL%bulk_exceptions(L).ERROR_INDEX;
      v_code := sqlerrm(-1 * SQL%bulk_exceptions(L).ERROR_CODE);
      -- BUG FIX: the original INSERT was missing its closing ");" so the
      -- procedure did not even compile; the statement is now terminated.
      INSERT INTO t2_error_table
      VALUES('CUSTOMER_NODE_HISTORY',
             'CUSTOMER_NODE_ID',
             v_msg,
             t2(v_index).customer_node_id,
             null,
             'DO_BULK_INSERT',
             v_code);
      commit;
    END LOOP;
end do_bulk_insert;
begin
-- Load run parameters from the remote parameter table (one value each).
select value into v_at_gpid from t2_system_parameter@sppmig1 where name='atlanta_group_id';
select value into v_commit_count from t2_system_parameter@sppmig1 where name='batch_size';
select value into v_sch from t2_system_parameter@sppmig1 where name='schedule_id';
select value into v_pay from t2_system_parameter@sppmig1 where name='payment_location_code';
select value into v_at_oper from t2_system_parameter@sppmig1 where name='atlanta_operator_id';
select value into v_digit from t2_system_parameter@sppmig1 where name='digits_to_be_screened';
select value into v_res from t2_system_parameter@sppmig1 where name='responsible_agent';
select value into v_vat from t2_system_parameter@sppmig1 where name='vat_rate';
select value into v_agree from t2_system_parameter@sppmig1 where name='bank_agreement_status';
-- Pre-load the code-translation lookup collections used by xxpor_getsvid.
xxpor_utility.xxpor_loadmemory@sppmig1('CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_TYPE_ID',tab);
xxpor_utility.xxpor_loadmemory@sppmig1('CUSTOMER_NODE_HISTORY','CREDIT_RATING_CODE',tab2);
xxpor_utility.xxpor_loadmemory@sppmig1('CUSTOMER_NODE_HISTORY','PAYMENT_METHOD_CODE',tab3);
xxpor_utility.xxpor_loadmemory@sppmig1('CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_STATUS_CODE',tab1);
open c1;
loop
-- Fetch the next batch of source rows. NOTE(review): p2's element type is
-- CUSTOMER_NODE_HISTORY@slpmig1%ROWTYPE while c1 selects from CUS_NODE_HIS;
-- a shorter column in p2 than in the select list would raise
-- "Bulk Bind: Truncated Bind" on this FETCH — confirm the two definitions.
fetch c1 bulk collect into p2 limit v_commit_count;
for p in 1..p2.count loop
k:=K+1;
begin
-- Translate the four coded columns to their target service-variant ids.
xxpor_utility.xxpor_getsvid@sppmig1(p2(p).CUSTOMER_NODE_TYPE_ID,tab,svid);
p_sv_id:=svid;
xxpor_utility.xxpor_getsvid@sppmig1(p2(p).CUSTOMER_NODE_STATUS_CODE,tab1,svid1);
p_sv_id1 :=svid1;
xxpor_utility.xxpor_getsvid@sppmig1(p2(p).CREDIT_RATING_CODE,tab2,svid2);
p_sv_id2:=svid2;
xxpor_utility.xxpor_getsvid@sppmig1(p2(p).PAYMENT_METHOD_CODE,tab3,svid3);
p_sv_id3:=svid3;
-- Latest driver's licence / SSN for the row's person.
OPEN person_cur (p2(p).person_id);
FETCH person_cur INTO v_driver_licence, v_social;
CLOSE person_cur;
--select social_security_number into v_social from person_history@sppmig1 where
--PERSON_ID=p2(p).person_id AND EFFECTIVE_START_DATE = (SELECT MAX(EFFECTIVE_START_DATE) FROM
--PERSON_HISTORY@sppmig1 WHERE PERSON_ID=p2(p).person_id) ;
/*open c2(p2(p).customer_node_id);
fetch c2 into v_cus_gen1, v_cus_gen2, v_cus_gen3;
close c2;
xxpor_utility.get_status_code@sppmig1(v_cus_gen1,v_cus_gen2,v_cus_gen3,svid_sr);*/
-- Hard-coded status id; replaces the commented-out get_status_code lookup.
svid_sr:=2600000;
-- Map the source row into the staging record, substituting translated
-- codes and run parameters where required.
t2(k).CUSTOMER_NODE_ID := p2(p).CUSTOMER_NODE_ID;
t2(k).LAST_MODIFIED := p2(p).LAST_MODIFIED;
t2(k).EFFECTIVE_START_DATE := p2(p).EFFECTIVE_START_DATE;
t2(k).EFFECTIVE_END_DATE := p2(p).EFFECTIVE_END_DATE;
t2(k).CUSTOMER_NODE_TYPE_ID := p_sv_id;
-- A missing type translation is a hard error for this row.
if p_sv_id is null then
raise exp1;
end if;
t2(k).PRIMARY_IDENTIFIER := p2(p).PRIMARY_IDENTIFIER;
t2(k).PRIMARY_IDENTIFIER2 := p2(p).PRIMARY_IDENTIFIER2;
t2(k).NODE_NAME := p2(p).NODE_NAME ;
t2(k).NODE_NAME_UPPERCASE := p2(p).NODE_NAME_UPPERCASE ;
t2(k).NODE_NAME_SOUNDEX := p2(p).NODE_NAME_SOUNDEX;
t2(k).ATLANTA_GROUP_ID := v_at_gpid ;
t2(k).ATLANTA_OPERATOR_ID := p2(p).ATLANTA_OPERATOR_ID;
t2(k).GL_CODE_ID := p2(p).GL_CODE_ID;
t2(k).PARENT_CUSTOMER_NODE_ID := p2(p).PARENT_CUSTOMER_NODE_ID ;
t2(k).HIERARCHY_LEVEL := p2(p).HIERARCHY_LEVEL ;
t2(k).ROOT_CUSTOMER_NODE_ID := p2(p).ROOT_CUSTOMER_NODE_ID ;
t2(k).CUSTOMER_NODE_STATUS_CODE := p_sv_id1 ;
if p_sv_id1 is null then
raise exp2;
end if;
t2(k).CREATED_DATE := p2(p).CREATED_DATE;
t2(k).ACTIVE_DATE := p2(p).ACTIVE_DATE ;
t2(k).PERSON_ID := p2(p).PERSON_ID ;
t2(k).PRIME_ACCOUNT_ID := p2(p).PRIME_ACCOUNT_ID;
t2(k).REPORT_LEVEL_CODE := p2(p).REPORT_LEVEL_CODE;
t2(k).POSTAL_ADDRESS_ID := p2(p).POSTAL_ADDRESS_ID;
t2(k).SITE_ADDRESS_ID := p2(p).SITE_ADDRESS_ID ;
t2(k).CURRENCY_ID := p2(p).CURRENCY_ID;
t2(k).SCHEDULE_ID := v_sch;
t2(k).BILLING_PRIORITY := p2(p).BILLING_PRIORITY ;
t2(k).BILLING_COMPLEXITY:= p2(p).BILLING_COMPLEXITY ;
t2(k).BILLING_CONFIGURATION_CODE := p2(p).BILLING_CONFIGURATION_CODE;
t2(k).SUPPRESS_IND_CODE := p2(p).SUPPRESS_IND_CODE ;
t2(k).SUPPRESS_BILL_CYCLE_COUNT := p2(p).SUPPRESS_BILL_CYCLE_COUNT;
t2(k).SUPPRESS_UNTIL_ISSUE_DATE := p2(p).SUPPRESS_UNTIL_ISSUE_DATE;
t2(k).TURNOVER := p2(p).TURNOVER;
t2(k).TURNOVER_CURRENCY_ID := p2(p).TURNOVER_CURRENCY_ID ;
t2(k).CREDIT_LIMIT := p2(p).CREDIT_LIMIT ;
t2(k).CREDIT_LIMIT_CURRENCY_ID := p2(p).CREDIT_LIMIT_CURRENCY_ID;
t2(k).EXPECTED_REVENUE := p2(p).EXPECTED_REVENUE ;
t2(k).EXPECTED_REVENUE_CURRENCY_ID := p2(p).EXPECTED_REVENUE_CURRENCY_ID ;
-- Credit rating / payment method may be NULL (checks deliberately disabled).
t2(k).CREDIT_RATING_CODE := p_sv_id2 ;
-- if p_sv_id2 is null then
--raise exp3;
-- end if;
t2(k).CREDIT_COMMENTS := p2(p).CREDIT_COMMENTS ;
t2(k).TAX_CLASS_CODE := 1 ;
t2(k).PAYMENT_METHOD_CODE := p_sv_id3;
--if p_sv_id3 is null then
--raise exp4;
--end if;
t2(k).PAYMENT_LOCATION_CODE := v_pay ;
t2(k).BANK_CODE := NULL;
t2(k).BRANCH_CODE := NULL ;
t2(k).BANK_ACCOUNT_NAME := p2(p).NODE_NAME ;
t2(k).BANK_ACCOUNT_NUMBER := '1000000';
t2(k).BANK_ACCOUNT_REF := v_agree;
t2(k).CARD_TYPE_CODE := p2(p).CARD_TYPE_CODE ;
t2(k).CARD_NUMBER := p2(p).CARD_NUMBER ;
t2(k).CARD_EXPIRY_DATE := NULL ;
t2(k).ASSIGNED_OPERATOR_ID := NULL ;
t2(k).SALES_CHANNEL_CODE := 0;
t2(k).COMPANY_NUMBER := NULL;
t2(k).INDUSTRY_CODE := NULL;
t2(k).REGION_CODE := NULL;
t2(k).GENERAL_1 := v_vat ;
t2(k).GENERAL_2 := svid_sr ;
if svid_sr is null then
raise exp;
end if;
t2(k).GENERAL_3 := v_social ;
t2(k).GENERAL_4 := v_driver_licence ;
t2(k).GENERAL_5 := v_vat;
t2(k).GENERAL_6 := v_res;
-- Colon-delimited composite field; most positions intentionally empty.
t2(k).GENERAL_7 := null||':'||null||':'||'1000000'||':'||null||':'||null||':'||null||':';
t2(k).GENERAL_8 := '2' ;
t2(k).GENERAL_9 := v_digit;
t2(k).GENERAL_10 := p2(p).CUSTOMER_NODE_ID;
-- Per-row error handlers: log the failing source row and continue with
-- the next one. NOTE(review): k is NOT decremented here, so the staging
-- slot t2(k) for a failed row may remain partially populated and be
-- inserted by the next flush — verify this is intended.
exception when exp then
p_error:= sqlerrm;
insert into t2_error_table values ( 'CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_ID',p_error,p2(p).customer_node_id
,null,null,null);
commit;
when exp1 then
p_error:= sqlerrm;
insert into t2_error_table values ( 'CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_ID',p_error,p2(p).customer_node_id
,null,null,'customer_node_type_id is null');
commit;
when exp2 then
p_error:= sqlerrm;
insert into t2_error_table values ( 'CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_ID',p_error,p2(p).customer_node_id
,null,null,'customer_node_status_code is null');
commit;
/*when exp3 then
p_error:= sqlerrm;
insert into t2_error_table values ( 'CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_ID',p_error,p2(p).customer_node_id
,null,null,'credit_rating_code is null');
commit;
when exp4 then
p_error:= sqlerrm;
insert into t2_error_table values ( 'CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_ID',p_error,p2(p).customer_node_id
,null,null,null);
commit;*/
when others then
p_error:= sqlerrm;
insert into t2_error_table values ( 'CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_ID',p_error,p2(p).customer_node_id
,null,null,null);
commit;
end;
-- Flush a full batch to AAA and clear the staging collection.
if mod(k,v_commit_count)=0 then
do_bulk_insert;
t2.delete;
end if;
end loop;
-- NOTE(review): this flush runs on EVERY outer iteration but t2 is only
-- cleared at the mod() check above or after the loop — rows already
-- inserted may be re-inserted here. Also the exit test runs after the
-- flush, causing one extra do_bulk_insert call on the final pass. Verify.
do_bulk_insert;
exit when c1%notfound;
end loop;
t2.delete;
-- Fatal errors: log with a NULL id (no specific row), then re-raise so the
-- caller sees the failure.
exception when others then
p_error:= sqlerrm;
insert into t2_error_table values ( 'CUSTOMER_NODE_HISTORY','CUSTOMER_NODE_ID',p_error,null
,null,null,null);
commit;
RAISE;
end;
/
Hi there,
Following is the description of the error, you are getting.
ORA-06502:VALUE_ERROR
An arithmetic, conversion, truncation, or size-constraint error occurs. For example, when your program selects a column value into a character variable, if the value is longer than the declared length of the variable, PL/SQL aborts the assignment and raises VALUE_ERROR. In procedural statements, VALUE_ERROR is raised if the conversion of a character string into a number fails. (In SQL statements, INVALID_NUMBER is raised.)
Hopefully this will help. -
The below example is based on an example from Chapter 5 in Release 2 (9.2) documentation (Available in Oracle site).
But it is failing and throwing the following error:
"PLS-00435 DML statement without BULK In-BIND cannot be used inside FORALL".
Can you pl suggest a workaround for this?
-- Example adapted from the PL/SQL User's Guide (9.2), Chapter 5.
-- Fixture tables and seed rows for the bulk-bind demonstration below.
CREATE TABLE test_ravi3 (col1 NUMBER, col2 VARCHAR2(20));
CREATE TABLE test_ravi4 (col1 NUMBER, col2 VARCHAR2(20));
INSERT INTO test_ravi3 VALUES(1,'RAVI' );
INSERT INTO test_ravi3 VALUES(2,'RAVI1' );
INSERT INTO test_ravi3 VALUES(3,'RAVI2' );
INSERT INTO test_ravi3 VALUES(4,'RAVI3' );
INSERT INTO test_ravi3 VALUES(5,'RAVI4' );
DECLARE
TYPE RecTabTyp IS TABLE OF test_ravi3%ROWTYPE
INDEX BY BINARY_INTEGER;
-- NOTE(review): NumTabTyp and CharTabTyp are declared but never used.
TYPE NumTabTyp IS TABLE OF NUMBER
INDEX BY BINARY_INTEGER;
TYPE CharTabTyp IS TABLE OF VARCHAR2(20)
INDEX BY BINARY_INTEGER;
CURSOR c1 IS SELECT col1, col2 FROM test_ravi4;
rec_tab RecTabTyp;
BEGIN
-- Load all seed rows into the record collection.
SELECT COL1, COL2 BULK COLLECT INTO REC_TAB FROM TEST_RAVI3
WHERE COL1 < 9;
-- Record-style bulk insert: the whole record supplies the row.
FORALL I IN REC_TAB.FIRST..REC_TAB.LAST
INSERT INTO TEST_RAVI4 VALUES REC_TAB(I);
FOR I IN REC_TAB.FIRST..REC_TAB.LAST LOOP
REC_TAB(I).COL1 := REC_TAB(I).COL1 + 100;
END LOOP;
-- NOTE(review): per the question above, this is the statement raising
-- PLS-00435 — "SET (COL1, COL2) = record" is not a recognised bulk bind
-- inside FORALL in this release; a workaround is needed here.
FORALL I IN REC_TAB.FIRST..REC_TAB.LAST
UPDATE TEST_RAVI3 SET (COL1 ,COL2) = REC_TAB(I);
OPEN C1;
FETCH C1 BULK COLLECT INTO REC_TAB;
CLOSE C1;
END;
Is block abc based on Table1? If it is, then you should NOT be issuing ANY UPDATE SQL statements on the table. This just causes problems.
Assuming abc is based on Table1, then the block should contain a text item for all three fields: Field1, col1, and col2. Then all you need is:
-- Oracle Forms trigger fragment: walk every record in block abc and set
-- col1 where field1 = 'Y', letting Forms generate the rowid-based UPDATE
-- on COMMIT_FORM.
Go_block('abc');
first_record;
Loop
  -- Stop once we run off the end of the queried records.
  exit when :system.record_status='NEW';
  if :abc.field1 = 'Y' then
    :abc.col1 := value1;
  end if;
  exit when :system.last_record='TRUE';
  -- BUG FIX: without NEXT_RECORD the loop never advances past the first
  -- record and spins forever; move to the next record each iteration.
  next_record;
end loop;
first_record;
commit_form;
Forms will take care of creating and executing the sql update statement, and it will do it using rowid, which is better than "where col2 = :abc.col2;" Also, you will no longer have locking problems.
...However... if abc is based on a table other than Table1, then something like your code is what you need to use. The fastest method of execution would be the bulk binding route, but do it from a package procedure on the database that you call from the form. -
PL/SQL: numeric or value error: Bulk bind: Error in define
Hello
Does anyone know what this error means?
PL/SQL: numeric or value error: Bulk bind: Error in define
I've checked the cursor and all of the columns are correct.
Any help would be greatly appreciated as this is quite urgent.
Cheers
David
Hello
Here's the top of the function. The exception is thrown on the FETCH...BULK COLLECT line.
FUNCTION f_ps_insert_jlines(ac_JournalRows IN sys_refcursor,
an_StartLn IN OUT INTEGER,
at_Monetary IN OUT pt_monetary_amount,
at_Stats IN OUT pt_statistics_amount
) RETURN INTEGER
IS
l_jrnl_id pt_jrnl_id;
l_jrnl_line pt_jrnl_line;
l_jrnl_date pt_jrnl_date;
l_Ps_Account pt_ps_account;
l_ps_dept_id pt_ps_dept_id;
l_ps_product pt_ps_product;
l_ps_project_id pt_ps_project_id;
l_statistics_code pt_statistics_code;
l_jrnl_ln_ref pt_jrnl_ln_ref;
l_line_desc pt_line_desc;
ln_Index INTEGER;
BEGIN
FETCH ac_JournalRows BULK COLLECT INTO
l_jrnl_id,
l_jrnl_line,
l_jrnl_date,
l_Ps_Account,
l_ps_dept_id,
l_ps_product,
l_ps_project_id,
l_statistics_code,
at_monetary,
at_stats,
l_jrnl_ln_ref,
l_line_desc;
And here is the top of the cursor....
SELECT
l_char_jrnl_id AS journal_id,
ROWNUM + l_jrnl_Line AS journal_ln,
lcr_Journal_Header.Journal_Dt AS journal_dt,
ps_account,
ps_dept_id,
ps_product,
ps_project_id,
statistics_code,
monetary_amount,
statistics_amount,
jrnl_ln_ref,
line_desc
FROM ...
Any ideas?
Cheers
David
p.s. No idea what has happened to the formatting! -
Hi All,
Seems that Bulk binding of pl/sql do enhance performance, but have no information about its drawback. Does anyone know what drawback that bulk binding have? Thank you.
CH.
The only drawback I am aware of is that in some circumstances, using bind variables may cause the optimizer to choose a sub-optimal plan for a query.
Say you have a table with 1,000,000 rows. 900,000 rows have the value 1 in an indexed column, and the other 100,000 have unique values.
Clearly, if you issue something like:
SELECT *
FROM table
WHERE indexed_column = 256
you would want to use an index. However, if you are looking for indexed_column = 1, then a Full Table Scan would be more efficient. With bind variables, Oracle (prior to 9i) will most likely choose the FTS route because of the statistics. However, if you always query on the unique values, that is not the right plan. In 9i the optimizer "peeks" at the bind variable the first time the statement is parsed and chooses the plan based on that value. In this case, if you always query on the unique values, Oracle is much more likely to get the "correct" plan.
HTH
John -
Bulk binding issues in 9i/8i
Hi all,
I would like to know, if there is any change ,in the bulk binding feature of PL/SQL in Oracle 9i .
Please guide me on following issues ,
1) We use it since it was introduced in 8i and now we are migrating to 9i. I would be happy to exploit any new feature, if there .
2) when I am inserting or updating with buld binding, the collection I use must be dense (not sparse).Is there any way to oversome this limitation ?
3) Suppose there is a table of 1 million rows. I have to transfer the data from this table to another using bulk binding. What I want to do is, want to transfer the rows in batches of 10000 rows. As collecting 1 million records data once in a collection does not look like a good idea.
Please advice.
Regards.There are almost always new features with each successive release. There are a few sections in the PL/SQL User's Guide http://download-west.oracle.com/docs/cd/B10501_01/appdev.920/a96624/05_colls.htm#23723 that discuss bulk operations in 9.2-- you probably want to take a look at that. There is also a book from Oracle Press called "Oracle 9i New Features" which is excellent.
The LIMIT clause may have been introduced in 9i, i.e.
FETCH <<some cursor>>
BULK COLLECT INTO <<some collection>>
LIMIT 1000
There is a section in the PL/SQL User's Guide pertaining to the LIMIT clause http://download-west.oracle.com/docs/cd/B10501_01/appdev.920/a96624/05_colls.htm#29027
Justin
Distributed Database Consulting, Inc.
http://www.ddbcinc.com/askDDBC -
Hi All,
i am new for bulk binding when i am trying to insert data in table getting error PL/SQL: ORA-00904: : invalid identifier
i need to insert data in to one column below are the desc for the same .
-- Reconstructed from a garbled forum paste: the leading "CREATE", the "IS",
-- and the identifiers at the start of several lines (l_tab, l_error_count,
-- ex_dml_errors, DBMS_OUTPUT.PUT_LINE) had been stripped by the extraction.
-- Demonstration function: repopulates EMP.EMPNO with 1..5 via FORALL and
-- reports per-row failures. Returns 1 on completion.
-- NOTE(review): the parameter enterno is accepted but never used.
CREATE OR REPLACE FUNCTION OMDV4_IDENTITY_OWNER.testing2 (enterno IN NUMBER)
RETURN NUMBER
IS
  -- Collection of EMPNO values to bulk-insert.
  TYPE t_tab IS TABLE OF emp.empno%TYPE;
  l_tab         t_tab := t_tab();
  l_error_count NUMBER;
  ex_dml_errors EXCEPTION;
  -- BUG FIX: the handler below reads SQL%BULK_EXCEPTIONS, which is only
  -- populated when FORALL uses SAVE EXCEPTIONS and the collective failure
  -- surfaces as ORA-24381 — the original pragma mapped -01400 (cannot
  -- insert NULL), so the handler could never fire.
  PRAGMA EXCEPTION_INIT(ex_dml_errors, -24381);
BEGIN
  -- Fill the collection.
  FOR i IN 1 .. 5 LOOP
    l_tab.extend;
    l_tab(l_tab.last) := i;
  END LOOP;

  DELETE FROM emp;
  COMMIT;

  -- Perform a bulk operation.
  BEGIN
    -- BUG FIX: l_tab holds scalar EMPNO values, so the insert must name
    -- the target column; "VALUES l_tab(i)" without a column list raised
    -- ORA-00904 (record-style VALUES needs a %ROWTYPE collection, and is
    -- only supported from 11g onwards).
    FORALL i IN l_tab.first .. l_tab.last SAVE EXCEPTIONS
      INSERT INTO emp (empno) VALUES (l_tab(i));
  EXCEPTION
    WHEN ex_dml_errors THEN
      l_error_count := SQL%BULK_EXCEPTIONS.count;
      DBMS_OUTPUT.PUT_LINE('Number of failures: ' || l_error_count);
      FOR i IN 1 .. l_error_count LOOP
        DBMS_OUTPUT.PUT_LINE('Error: ' || i ||
          ' Array Index: ' || SQL%BULK_EXCEPTIONS(i).error_index ||
          ' Message: ' || SQLERRM(-SQL%BULK_EXCEPTIONS(i).ERROR_CODE));
      END LOOP;
  END;
  RETURN 1;
END;
Thanks
I hope that you are just practicing BULK COLLECT, because what you are doing with bulk collect is completely unnecessary: it can be done in plain SQL.
What is your DB version. Referring record type directly in value clause is available from 11g onwards I guess. In previous version you get the following error.
PL/SQL: ORA-03001: unimplemented feature
So to fix your problem your FORALL code should be like this
forall i in l_tab.first .. l_tab.last
insert into emp_t (empno) values (l_tab(i));
That's because l_tab contains the value of EMPNO alone. so you need to specify the column name in your insert statement. Also you need to use Brackets before and after l_tab(i). -
Jdbc thin driver bulk binding slow insertion performance problem
Hello All,
We have a third party application reporting slow insertion performance, while I traced the session and found out most of elapsed time for one insert execution is sql*net more data from client, it appears bulk binding is being used here because one execution has 200 rows inserted. I am wondering whether this has something to do with their jdbc thin driver(10.1.0.2 version) and our database version 9205. Do you have any similar experience on this, what other possible directions should I explore?
here is the trace report from 10046 event, I hide table name for privacy reason.
Besides, I tested bulk binding in PL/SQL to insert 200 rows in one execution, no problem at all. Network folks confirm that network should not be an issue as well, ping time from app server to db server is sub milisecond and they are in the same data center.
INSERT INTO ...
values
(:1, :2, :3, :4, :5, :6, :7, :8, :9, :10, :11, :12, :13, :14, :15, :16, :17,
:18, :19, :20, :21, :22, :23, :24, :25, :26, :27, :28, :29, :30, :31, :32,
:33, :34, :35, :36, :37, :38, :39, :40, :41, :42, :43, :44, :45)
call count cpu elapsed disk query current rows
Parse 1 0.00 0.00 0 0 0 0
Execute 1 0.02 14.29 1 94 2565 200
Fetch 0 0.00 0.00 0 0 0 0
total 2 0.02 14.29 1 94 2565 200
Misses in library cache during parse: 1
Optimizer goal: CHOOSE
Parsing user id: 25
Elapsed times include waiting on following events:
Event waited on Times Max. Wait Total Waited
---------------------------------------- Waited ---------- ------------
SQL*Net more data from client 28 6.38 14.19
db file sequential read 1 0.02 0.02
SQL*Net message to client 1 0.00 0.00
SQL*Net message from client 1 0.00 0.00
********************************************************************************
I have exactly the same problem. I tried to find out what is going on and changed several JDBC drivers on AIX, but with no luck. I also ran the process on my laptop, which produced better and faster performance.
Therefore I made a special solution ( not practical) by creating flat files and defining the data as an external table, the oracle will read the data in those files as they were data inside a table, this gave me very fast insertion into the database, but still I am looking for an answer for your question here. Using Oracle on AIX machine is a normal business process followed by a lot of companies and there must be a solution for this. -
Jdbc thin driver and bulk binding slow insertion performance
Hello All,
We have a third party application reporting slow insertion performance, while I traced the session and found out most of elapsed time for one insert execution is sql*net more data from client, it appears bulk binding is being used here because one execution has 200 rows inserted. I am wondering whether this has something to do with their jdbc thin driver(10.1.0.2 version) and our database version 9205. Do you have any similar experience on this, what other possible directions should I explore?
here is the trace report from 10046 event, I hide table name for privacy reason.
Besides, I tested bulk binding in PL/SQL to insert 200 rows in one execution, no problem at all. Network folks confirm that network should not be an issue as well, ping time from app server to db server is sub milisecond and they are in the same data center.
INSERT INTO ...
values
(:1, :2, :3, :4, :5, :6, :7, :8, :9, :10, :11, :12, :13, :14, :15, :16, :17,
:18, :19, :20, :21, :22, :23, :24, :25, :26, :27, :28, :29, :30, :31, :32,
:33, :34, :35, :36, :37, :38, :39, :40, :41, :42, :43, :44, :45)
call count cpu elapsed disk query current rows
Parse 1 0.00 0.00 0 0 0 0
Execute 1 0.02 14.29 1 94 2565 200
Fetch 0 0.00 0.00 0 0 0 0
total 2 0.02 14.29 1 94 2565 200
Misses in library cache during parse: 1
Optimizer goal: CHOOSE
Parsing user id: 25
Elapsed times include waiting on following events:
Event waited on Times Max. Wait Total Waited
---------------------------------------- Waited ---------- ------------
SQL*Net more data from client 28 6.38 14.19
db file sequential read 1 0.02 0.02
SQL*Net message to client 1 0.00 0.00
SQL*Net message from client 1 0.00 0.00
********************************************************************************
I have exactly the same problem. I tried to find out what is going on and changed several JDBC drivers on AIX, but with no luck. I also ran the process on my laptop, which produced better and faster performance.
Therefore I made a special solution ( not practical) by creating flat files and defining the data as an external table, the oracle will read the data in those files as they were data inside a table, this gave me very fast insertion into the database, but still I am looking for an answer for your question here. Using Oracle on AIX machine is a normal business process followed by a lot of companies and there must be a solution for this. -
Dynamic SQL and Bulk Bind... Interesting Problem !!!
Hi Forum !!
I've got a very interesting problem involving Dynamic SQL and Bulk Bind. I really Hope you guys have some suggestions for me...
Table A contains a column named TX_FORMULA. There are many strings holding expressions like '.3 * 2 + 1.5' or '(3.4 + 2) / .3', all well formed numeric formulas. I want to calculate each formula, finding the number obtained as a result of each calculation.
I wrote something like this:
-- Question code: evaluate each textual formula in A.TX_FORMULA and capture
-- the numeric result. As posted, this block does not compile — the error
-- messages quoted below the block identify the problems annotated here.
DECLARE
TYPE T_FormulasNum IS TABLE OF A.TX_FORMULA%TYPE
INDEX BY BINARY_INTEGER;
TYPE T_MontoIndicador IS TABLE OF A.MT_NUMBER%TYPE
INDEX BY BINARY_INTEGER;
V_FormulasNum T_FormulasNum;
V_MontoIndicador T_MontoIndicador;
BEGIN
SELECT DISTINCT CD_INDICADOR,
TX_FORMULA_NUMERICA
-- NOTE(review): V_CodIndicador is referenced here but never declared above.
BULK COLLECT INTO V_CodIndicador, V_FormulasNum
FROM A;
-- NOTE(review): FORALL only accepts a single DML statement with a bulk
-- in-bind — wrapping EXECUTE IMMEDIATE raises PLS-00435 (quoted below);
-- use a plain FOR loop instead. RETURNING INTO a whole collection also
-- raises PLS-00597; an OUT bind per iteration is needed.
FORALL i IN V_FormulasNum.FIRST..V_FormulasNum.LAST
EXECUTE IMMEDIATE
'BEGIN
:1 := TO_NUMBER(:2);
END;'
USING V_FormulasNum(i) RETURNING INTO V_MontoIndicador;
END;
But I'm getting the following messages:
ORA-06550: line 22, column 43:
PLS-00597: expression 'V_MONTOINDICADOR' in the INTO list is of wrong type
ORA-06550: line 18, column 5:
PL/SQL: Statement ignored
ORA-06550: line 18, column 5:
PLS-00435: DML statement without BULK In-BIND cannot be used inside FORALL
Any Idea to solve this problem ?
Thanks in Advance !!
Hello,
many many errors...
1. You can use FORALL only in DML operators, in your case you must use simple FOR LOOP.
2. You can use bind variables only in DML- Statements. In other statements you have to use literals (hard parsing).
3. RETURNING INTO - Clause in appropriate , use instead of OUT variable.
4. Remark: FOR I IN FIRST..LAST is not fully correct: if you haven't results, you get EXCEPTION NO_DATA_FOUND. Use Instead of 1..tab.count
This code works.
-- Corrected version from the answer: evaluates each formula string with a
-- per-row EXECUTE IMMEDIATE inside an ordinary FOR loop (FORALL cannot
-- wrap dynamic PL/SQL), binding the result via an OUT bind.
DECLARE
TYPE T_FormulasNum IS TABLE OF VARCHAR2(255)
INDEX BY BINARY_INTEGER;
TYPE T_MontoIndicador IS TABLE OF NUMBER
INDEX BY BINARY_INTEGER;
V_FormulasNum T_FormulasNum;
V_MontoIndicador T_MontoIndicador;
BEGIN
SELECT DISTINCT CD_INDICATOR,
TX_FORMULA_NUMERICA
BULK COLLECT INTO V_MontoIndicador, V_FormulasNum
FROM A;
-- 1..count is used instead of FIRST..LAST so an empty result set does not
-- raise an exception (the collection is dense after BULK COLLECT).
FOR i IN 1..V_FormulasNum.count
LOOP
-- NOTE(review): the formula text is concatenated into the dynamic block,
-- so whatever is stored in TX_FORMULA_NUMERICA is executed verbatim —
-- only safe if that column's content is fully trusted.
EXECUTE IMMEDIATE
'BEGIN
:v_motto := TO_NUMBER('||v_formulasnum(i)||');
END;'
USING OUT V_MontoIndicador(i);
dbms_output.put_line(v_montoindicador(i));
END LOOP;
END;
You have to read more about bulk binding and dynamic SQL.
HTH
Regards
Dmytro
Test table
a
(cd_indicator number,
tx_formula_numerica VARCHAR2(255))
CD_INDICATOR TX_FORMULA_NUMERICA
2 (5+5)*2
1 2*3*4
Message was edited by:
Dmytro Dekhtyaryuk -
Accessing the index in bulk binds
Hi
I wonder if it is possible to access the index itself in a bulk bind. For instance:
SQL> desc draft_responces;
Name Type Nullable Default Comments
S_NUMBER VARCHAR2(10)
CLUSTER_ID NUMBER
STATEMENT_ID NUMBER
RESPONCE NUMBER Y
RESP_NUM NUMBER Y
and I wished to insert the responce number i into the resp_num:
-- Bulk-insert one DRAFT_RESPONCES row per populated element of
-- p_statement_ids, recording the element index itself in RESP_NUM.
forall i in indices of p_statement_ids
  -- BUG FIX: the column list was missing its opening parenthesis, which
  -- made the INSERT a syntax error as pasted.
  -- NOTE(review): in releases before 11g the FORALL index may only appear
  -- as a collection subscript, so the bare ", i" in the VALUES list below
  -- is rejected; the usual workaround is to fill a separate collection
  -- with the index values and bind p_index(i) instead — confirm your
  -- database version.
  insert into draft_responces (
  s_number
  ,cluster_id
  ,statement_id
  ,responce
  ,resp_num)
  values (
  p_snumber
  ,p_cluster_id
  ,p_statement_ids(i)
  ,p_responces(i)
  , i );
which fails ... is it possible to access the index i in a bulk bind explicitly?
thanks
You're in the wrong forum (this one is for issues with the SQL Developer tool). You'll get more answers in the SQL and PL/SQL forum.
Have fun,
K. -
Bulk Binds-How to avoid naming all columns?
When using Bulk Binds for inserts/updates,One has to explicitly use all column names in declaration.This is maint intensive as
addition of any colum will require changes in code at multiple locations.
Is there any way to reference the columns though other means(such as records)?
thanks
Zia wrote:
1) I have a detail report which contains more than 100 thousand rows. This report is already on the menu and available to users. Users have an option on the menu to select output as either 'screen' or 'spreadsheet'. If a user selects 'spreadsheet', the report opens in an Excel file, but in the old Excel format, truncating the excess rows, as .xls has a limit of about 65 thousand rows. I do not want to use 'csv' format, as direct conversion into Excel is more convenient for users. If the report contains fewer than 65 thousand rows then everything is fine and there are no issues with the report. I hope I could explain the real problem this time.
65 thousand rows is the limit of the Excel version you use. I use Microsoft Office 2010, where the limit is 1,048,576 rows per sheet.
One solution is uninstall the present Office version and use Office 2010.
2) Regarding second point, I have a summary report which is all fine and showing desired output in desired format. But when user want output in excel file, some of report columns occupy more than one columns and causing difficulties for users. Actually users do need to convert few reports into excel and work with formulas in excel but in this situation they have to do lots of changes in format before applying formulas. A sample screen shot of a report and converted report in excel file both can be seen on this link [https://skydrive.live.com/?cid=573511bde4261fe6#cid=573511BDE4261FE6&id=573511BDE4261FE6%21120]
I'm not sure about the solution. Most probably you have more space in repeating frame and cause this thing happen. shorter the space in the frame and try.
Hope this will help you. -
Bulk Binding-How to avoid naming all columns
When using Bulk Binds for inserts/updates,One has to explicitly use all column names in declaration.This is maint intensive as
addition of any colum will require changes in code at multiple locations.
Is there any way to reference the columns though other means(such as records)?
thanks
Zia wrote:
1) I have a detail report which contains more than 100 thousand rows. This report is already on the menu and available to users. Users have an option on the menu to select output as either 'screen' or 'spreadsheet'. If a user selects 'spreadsheet', the report opens in an Excel file, but in the old Excel format, truncating the excess rows, as .xls has a limit of about 65 thousand rows. I do not want to use 'csv' format, as direct conversion into Excel is more convenient for users. If the report contains fewer than 65 thousand rows then everything is fine and there are no issues with the report. I hope I could explain the real problem this time.
65 thousand rows is the limit of the Excel version you use. I use Microsoft Office 2010, where the limit is 1,048,576 rows per sheet.
One solution is uninstall the present Office version and use Office 2010.
2) Regarding second point, I have a summary report which is all fine and showing desired output in desired format. But when user want output in excel file, some of report columns occupy more than one columns and causing difficulties for users. Actually users do need to convert few reports into excel and work with formulas in excel but in this situation they have to do lots of changes in format before applying formulas. A sample screen shot of a report and converted report in excel file both can be seen on this link [https://skydrive.live.com/?cid=573511bde4261fe6#cid=573511BDE4261FE6&id=573511BDE4261FE6%21120]
I'm not sure about the solution. Most probably you have more space in repeating frame and cause this thing happen. shorter the space in the frame and try.
Hope this will help you. -
This is driving me nuts!
I am getting this error from OWB during a mapping process.
I have checked the input data and it looks fine.
The run time audit browser just lists all of the steps but does not make it clear which one failed. Is it the last one which is shown (does not have HIDE as selection link.)
I also tried to determine which row was causing the problem and followed the instructions at http://www.nicholasgoodman.com/bt/blog/2005/07/, but no row_id was recorded in the views. In actual fact there wasn't very much audit info other than that the mapping ran and was complete (even though it failed).
In the error message section it has, in order
Map Step - blank
Rowkey - 35204435256
Severity - X
Error Message - ORA-06502: PL/SQL: numeric or value error: Bulk Bind: Truncated Bind
Object Name - N/A
Object Column - *
From the PL/SQL error I thought it may be trying to insert into a data field that was too small, but all of the columns that are used are full of data of length shorter than all of the target tables.
I have even started running the cursors in the generated PLSQL but I don't get the error by doing this.
Thanks in advance for any tips at all.Thanks for the response.
I managed to work it out and it had to do with the selection criteria of one of the filters.
For anyone else facing this error, check to see if any of the rows being inserted has the same key/identifier as some existing in the target table. If so add an extra condition to the where clause.
This worked for me. -
ORA-06502: PL/SQL: numeric or value error: Bulk Bind: Truncated Bind
I have a map which worked fine in 10.2.0.1. The same map in 11.2.0.2 is giving me the error:
'ORA-06502: PL/SQL: numeric or value error: Bulk Bind: Truncated Bind.'
I have one source and one target. This is a straight load, no transformations.
While debugging the map I have noticed the culprit is one column in the source which is varchar2(30),
I have the target column with the same varchar2(30), and I tried increasing the size of
the target column but i keep getting the same error. While searching the forum someone suggested
to change the configuration of code generation options and runtime parameters to set based.
But strangely it gave me an error because the set based option is not availabe in the new 11.2.0.2.
Should the set based option be available in this version. Please suggest on how i could resolve the
error of 'ORA-06502: PL/SQL: numeric or value error: Bulk Bind: Truncated Bind.' Thank you.
Hi there,
Following is the description of the error, you are getting.
ORA-06502:VALUE_ERROR
An arithmetic, conversion, truncation, or size-constraint error occurs. For example, when your program selects a column value into a character variable, if the value is longer than the declared length of the variable, PL/SQL aborts the assignment and raises VALUE_ERROR. In procedural statements, VALUE_ERROR is raised if the conversion of a character string into a number fails. (In SQL statements, INVALID_NUMBER is raised.)
Hopefully this will help.
Maybe you are looking for
-
How to validate your LCES PDF-Gen install on WebSphere?
Hi, I have installed LiveCycle ES 8.2.1 on a Windows Server machine with WebSphere App Server. This machine has the Adobe Acrobat Pro 9.0 installed. (I do not have Pro 9.0 Extended). (I hope that is ok.) Anyway, this machine also has MS Office 2003 S
-
Icon in system prefs not displaying correctly
In the system prefs pane, the speech icon is being dispayed as a box with a light switch and the text cut off. Any ideas on how to get this back to the original microphone. Thanks.
-
Windows virtual machine on a Mac mini
Can the Fusion drive in a Mac mini be partitioned to run a Windows7 virtual machine?
-
I want remove some dark spots on a hand and color it flesh color. I'm very new to Lightroom, can anyone help me with this? Thanks in advance for any help!
-
Blinking dash problem on Macbook 2008 unibody after installation Windows 7
Hello everybody, I installed Windows 7 (professional x86) with bootcamp (3.0.1), installation good, but when I want to boot on Windows 7, I have a blinking dash and I can't do anything. I have a macbook 2008 unibody, last version of mac osx. Before I