Error loading a .dbf file into a table

Hi all,
I am loading a .dbf file into a table, using code I found through Google, but I don't know why it is not working when I run it.
The code I run and the error are below; can anyone help me?
BEGIN
EXPRESS.DBF2ORA.LOAD_TABLE( 'DATA_201', 'SAL_HEAD.DBF', 'dbf_tab_pc', NULL, false );
END;
The error is:
ORA-22285: non-existent directory or file for FILEOPEN operation
ORA-06512: at "EXPRESS.DBF2ORA", line 414
ORA-06512: at line 2
The package code is:
create or replace package body dbase_pkg
as
-- Might have to change on your platform!!!
-- Controls the byte order of binary integers read in
-- from the dbase file
BIG_ENDIAN constant boolean default TRUE;
type dbf_header is RECORD
(
    version    varchar2(25), -- dBASE version number
    year       int,          -- 1 byte int year, add to 1900
    month      int,          -- 1 byte month
    day        int,          -- 1 byte day
    no_records int,          -- number of records in file, 4 byte int
    hdr_len    int,          -- length of header, 2 byte int
    rec_len    int,          -- number of bytes in record, 2 byte int
    no_fields  int           -- number of fields
);
type field_descriptor is RECORD
(
    name     varchar2(11),
    type     char(1),
    length   int, -- 1 byte length
    decimals int  -- 1 byte scale
);
type field_descriptor_array
is table of
field_descriptor index by binary_integer;
type rowArray
is table of
varchar2(4000) index by binary_integer;
g_cursor binary_integer default dbms_sql.open_cursor;
-- Function to convert a binary unsigned integer
-- into a PLSQL number
function to_int( p_data in varchar2 ) return number
is
l_number number default 0;
l_bytes number default length(p_data);
begin
if (big_endian)
then
for i in 1 .. l_bytes loop
l_number := l_number +
ascii(substr(p_data,i,1)) *
power(2,8*(i-1));
end loop;
else
for i in 1 .. l_bytes loop
l_number := l_number +
ascii(substr(p_data,l_bytes-i+1,1)) *
power(2,8*(i-1));
end loop;
end if;
return l_number;
end;
-- Routine to parse the DBASE header record, can get
-- all of the details of the contents of a dbase file from
-- this header
procedure get_header
(p_bfile in bfile,
p_bfile_offset in out NUMBER,
p_hdr in out dbf_header,
p_flds in out field_descriptor_array )
is
l_data varchar2(100);
l_hdr_size number default 32;
l_field_desc_size number default 32;
l_flds field_descriptor_array;
begin
p_flds := l_flds;
l_data := utl_raw.cast_to_varchar2(
dbms_lob.substr( p_bfile,
l_hdr_size,
p_bfile_offset ) );
p_bfile_offset := p_bfile_offset + l_hdr_size;
p_hdr.version := ascii( substr( l_data, 1, 1 ) );
p_hdr.year := 1900 + ascii( substr( l_data, 2, 1 ) );
p_hdr.month := ascii( substr( l_data, 3, 1 ) );
p_hdr.day := ascii( substr( l_data, 4, 1 ) );
p_hdr.no_records := to_int( substr( l_data, 5, 4 ) );
p_hdr.hdr_len := to_int( substr( l_data, 9, 2 ) );
p_hdr.rec_len := to_int( substr( l_data, 11, 2 ) );
p_hdr.no_fields := trunc( (p_hdr.hdr_len - l_hdr_size)/
l_field_desc_size );
for i in 1 .. p_hdr.no_fields
loop
l_data := utl_raw.cast_to_varchar2(
dbms_lob.substr( p_bfile,
l_field_desc_size,
p_bfile_offset ));
p_bfile_offset := p_bfile_offset + l_field_desc_size;
p_flds(i).name := rtrim(substr(l_data,1,11),chr(0));
p_flds(i).type := substr( l_data, 12, 1 );
p_flds(i).length := ascii( substr( l_data, 17, 1 ) );
p_flds(i).decimals := ascii(substr(l_data,18,1) );
end loop;
p_bfile_offset := p_bfile_offset +
mod( p_hdr.hdr_len - l_hdr_size,
l_field_desc_size );
end;
function build_insert
( p_tname in varchar2,
p_cnames in varchar2,
p_flds in field_descriptor_array ) return varchar2
is
l_insert_statement long;
begin
l_insert_statement := 'insert into ' || p_tname || '(';
if ( p_cnames is NOT NULL )
then
l_insert_statement := l_insert_statement ||
p_cnames || ') values (';
else
for i in 1 .. p_flds.count
loop
if ( i <> 1 )
then
l_insert_statement := l_insert_statement||',';
end if;
l_insert_statement := l_insert_statement ||
'"'|| p_flds(i).name || '"';
end loop;
l_insert_statement := l_insert_statement ||
') values (';
end if;
for i in 1 .. p_flds.count
loop
if ( i <> 1 )
then
l_insert_statement := l_insert_statement || ',';
end if;
if ( p_flds(i).type = 'D' )
then
l_insert_statement := l_insert_statement ||
'to_date(:bv' || i || ',''yyyymmdd'' )';
else
l_insert_statement := l_insert_statement ||
':bv' || i;
end if;
end loop;
l_insert_statement := l_insert_statement || ')';
return l_insert_statement;
end;
function get_row
( p_bfile in bfile,
p_bfile_offset in out number,
p_hdr in dbf_header,
p_flds in field_descriptor_array ) return rowArray
is
l_data varchar2(4000);
l_row rowArray;
l_n number default 2;
begin
l_data := utl_raw.cast_to_varchar2(
dbms_lob.substr( p_bfile,
p_hdr.rec_len,
p_bfile_offset ) );
p_bfile_offset := p_bfile_offset + p_hdr.rec_len;
l_row(0) := substr( l_data, 1, 1 );
for i in 1 .. p_hdr.no_fields loop
l_row(i) := rtrim(ltrim(substr( l_data,
l_n,
p_flds(i).length ) ));
if ( p_flds(i).type = 'F' and l_row(i) = '.' )
then
l_row(i) := NULL;
end if;
l_n := l_n + p_flds(i).length;
end loop;
return l_row;
end get_row;
procedure show( p_hdr in dbf_header,
p_flds in field_descriptor_array,
p_tname in varchar2,
p_cnames in varchar2,
p_bfile in bfile )
is
l_sep varchar2(1) default ',';
procedure p(p_str in varchar2)
is
l_str long default p_str;
begin
while( l_str is not null )
loop
dbms_output.put_line( substr(l_str,1,250) );
l_str := substr( l_str, 251 );
end loop;
end;
begin
p( 'Sizeof DBASE File: ' || dbms_lob.getlength(p_bfile) );
p( 'DBASE Header Information: ' );
p( chr(9)||'Version = ' || p_hdr.version );
p( chr(9)||'Year = ' || p_hdr.year );
p( chr(9)||'Month = ' || p_hdr.month );
p( chr(9)||'Day = ' || p_hdr.day );
p( chr(9)||'#Recs = ' || p_hdr.no_records);
p( chr(9)||'Hdr Len = ' || p_hdr.hdr_len );
p( chr(9)||'Rec Len = ' || p_hdr.rec_len );
p( chr(9)||'#Fields = ' || p_hdr.no_fields );
p( chr(10)||'Data Fields:' );
for i in 1 .. p_hdr.no_fields
loop
p( 'Field(' || i || ') '
|| 'Name = "' || p_flds(i).name || '", '
|| 'Type = ' || p_flds(i).Type || ', '
|| 'Len = ' || p_flds(i).length || ', '
|| 'Scale= ' || p_flds(i).decimals );
end loop;
p( chr(10) || 'Insert We would use:' );
p( build_insert( p_tname, p_cnames, p_flds ) );
p( chr(10) || 'Table that could be created to hold data:');
p( 'create table ' || p_tname );
p( '(' );
for i in 1 .. p_hdr.no_fields
loop
if ( i = p_hdr.no_fields ) then l_sep := ')'; end if;
dbms_output.put
( chr(9) || '"' || p_flds(i).name || '" ');
if ( p_flds(i).type = 'D' ) then
p( 'date' || l_sep );
elsif ( p_flds(i).type = 'F' ) then
p( 'float' || l_sep );
elsif ( p_flds(i).type = 'N' ) then
if ( p_flds(i).decimals > 0 )
then
p( 'number('||p_flds(i).length||','||
p_flds(i).decimals || ')' ||
l_sep );
else
p( 'number('||p_flds(i).length||')'||l_sep );
end if;
else
p( 'varchar2(' || p_flds(i).length || ')'||l_sep);
end if;
end loop;
p( '/' );
end;
procedure load_Table( p_dir in varchar2,
p_file in varchar2,
p_tname in varchar2,
p_cnames in varchar2 default NULL,
p_show in boolean default FALSE )
is
l_bfile bfile;
l_offset number default 1;
l_hdr dbf_header;
l_flds field_descriptor_array;
l_row rowArray;
begin
l_bfile := bfilename( p_dir, p_file );
dbms_lob.fileopen( l_bfile );
get_header( l_bfile, l_offset, l_hdr, l_flds );
if ( p_show )
then
show( l_hdr, l_flds, p_tname, p_cnames, l_bfile );
else
dbms_sql.parse( g_cursor,
build_insert(p_tname,p_cnames,l_flds),
dbms_sql.native );
for i in 1 .. l_hdr.no_records loop
l_row := get_row( l_bfile,
l_offset,
l_hdr,
l_flds );
if ( l_row(0) <> '*' ) -- deleted record
then
for i in 1..l_hdr.no_fields loop
dbms_sql.bind_variable( g_cursor,
':bv'||i,
l_row(i),
4000 );
end loop;
if ( dbms_sql.execute( g_cursor ) <> 1 )
then
raise_application_error( -20001,
'Insert failed ' || sqlerrm );
end if;
end if;
end loop;
end if;
dbms_lob.fileclose( l_bfile );
exception
when others then
if ( dbms_lob.isopen( l_bfile ) > 0 ) then
dbms_lob.fileclose( l_bfile );
end if;
RAISE;
end;
end dbase_pkg;
/
I think I am making a mistake in creating and using the directory. Please point me in the right direction.
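
ORA-22285 on FILEOPEN means the directory object named in bfilename() does not exist, the file name does not match what is on the server (file names are case-sensitive on Unix), or the calling schema has no READ grant on the directory. A minimal sketch of the setup that must exist before the call above can work; the path /data/dbf is an assumption, so substitute wherever SAL_HEAD.DBF actually sits on the database server's file system (not the client's):

-- Run as a privileged user. The path is hypothetical and must be a
-- directory on the database server itself.
CREATE OR REPLACE DIRECTORY DATA_201 AS '/data/dbf';

-- Allow the schema that runs DBF2ORA to read files through it.
GRANT READ ON DIRECTORY DATA_201 TO express;

-- Sanity check from the EXPRESS schema: prints 1 when both the
-- directory object and the file name resolve.
SET SERVEROUTPUT ON
DECLARE
    l_bf bfile := bfilename( 'DATA_201', 'SAL_HEAD.DBF' );
BEGIN
    dbms_output.put_line( dbms_lob.fileexists( l_bf ) );
END;
/

If fileexists() prints 0, fix the directory path or the file name before revisiting the package. Note that directory object names default to uppercase, so the first argument to bfilename() must be 'DATA_201' exactly as created above.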

Similar Messages

  • ORA-26004: Tables loaded through the direct path may not be clustered

    Hi,
    I am planning to upload data to an IOT table using SQL*Loader, but it ends with an error:
    ORA-26004: Tables loaded through the direct path may not be clustered.
    How can I resolve this? This table is going to take a high volume of inserts, and we created it as an IOT to speed up queries against it.
    table create syntax:
    create CLUSTER C_HUA (
    B_number number(10),
    A_number number(10),
    ins_date date,
    account number(10),
    C_number number(10));
    SQL> CREATE TABLE HUA (
    A_number number(10),
    B_number number(10),
    ins_date date,
    account number(10),
    C_number number(10))
    CLUSTER C_HUA (
    B_number,
    A_number,
    ins_date,
    account,
    C_number);
    SQL> CREATE INDEX HUA_index1 on CLUSTER C_HUA;
    Please help me resolve this.
    Thanks
    ashwan

    You have to use the conventional path: DIRECT=false (see the sketch after this list).
    Restrictions on Using Direct Path Loads
    The following conditions must be satisfied for you to use the direct path load method:
    * Tables are not clustered.
    * Tables to be loaded do not have any active transactions pending.
    * You are not loading a parent table together with a child table.
    * You are not loading BFILE columns.
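
    A minimal sketch of a conventional-path control file for the HUA table above; the data file name hua.dat and the field list are assumptions for illustration. Conventional path is also the default, so simply omitting DIRECT=TRUE from the sqlldr command line has the same effect:

    OPTIONS (DIRECT=FALSE)  -- force conventional path
    LOAD DATA
    INFILE 'hua.dat'
    APPEND
    INTO TABLE HUA
    FIELDS TERMINATED BY ','
    (A_number, B_number, ins_date DATE "YYYYMMDD", account, C_number)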

  • Full Load: Error while executing :TRUNCATE TABLE: S_ETL_PARAM

    Hi All,
    We are using BI Apps 7.9.6.1. The full load was running fine, but now we are facing a problem with truncating the table S_ETL_PARAM.
    I have restarted the Informatica server and also the DAC server, but I am still getting the same thing in the DAC log:
    "ANOMALY INFO::: Error while executing : TRUNCATE TABLE:S_ETL_PARAM
    MESSAGE:::com.siebel.etl.database.IllegalSQLQueryException: DBConnection_OLTP:SIEBTRUN ('siebel.S_ETL_PARAM')
    Values :
    Null Map
    EXCEPTION CLASS::: java.lang.Exception"
    Any suggestions?
    Thanks in advance,
    Deepak

    Are you trying to run an incremental load when you get this truncate error? Can you re-run the full load and see whether that still runs OK? Please also check your DW-side database logs (the alert log, for example) for any DB-level issue; such errors do not surface as friendly messages on the DAC/Informatica side.

  • Project Analytics 7.9.6.1 - Error while running a full load

    Hi All,
    I am performing a full load for Projects Analytics and get the following error,
    =====================================
    ERROR OUTPUT
    =====================================
    1103 SEVERE Wed Nov 18 02:49:36 WST 2009 Could not attach to workflow because of errorCode 36331 For workflow SDE_ORA_CodeDimension_Gl_Account
    1104 SEVERE Wed Nov 18 02:49:36 WST 2009
    ANOMALY INFO::: Error while executing : INFORMATICA TASK:SDE_ORA11510_Adaptor:SDE_ORA_CodeDimension_Gl_Account:(Source : FULL Target : FULL)
    MESSAGE:::
    Irrecoverable Error
    Error while contacting Informatica server for getting workflow status for SDE_ORA_CodeDimension_Gl_Account
    Error Code = 36331:Unknown reason for error code 36331
    Pmcmd output :
    The session log initialises a NULL value for mapping parameter MPLT_ADI_CODES.$$CATEGORY. This is then used in subsequent SQL and results in an ORA-00936: missing expression error. Following are the initialization section and the load section containing the error from the log.
    Initialisation
    DIRECTOR> VAR_27028 Use override value [DataWarehouse] for session parameter:[$DBConnection_OLAP].
    DIRECTOR> VAR_27028 Use override value [ORA_11_5_10] for session parameter:[$DBConnection_OLTP].
    DIRECTOR> VAR_27028 Use override value [ORA_11_5_10.DATAWAREHOUSE.SDE_ORA11510_Adaptor.SDE_ORA_CodeDimension_Gl_Account_Segments.log] for session parameter:[$PMSessionLogFile].
    DIRECTOR> VAR_27027 Use default value [] for mapping parameter:[MPLT_ADI_CODES.$$CATEGORY].
    DIRECTOR> VAR_27028 Use override value [4] for mapping parameter:[MPLT_SA_ORA_CODES.$$DATASOURCE_NUM_ID].
    DIRECTOR> VAR_27028 Use override value [DEFAULT] for mapping parameter:[MPLT_SA_ORA_CODES.$$TENANT_ID].
    DIRECTOR> TM_6014 Initializing session [SDE_ORA_CodeDimension_Gl_Account_Segments] at [Wed Nov 18 02:49:11 2009].
    DIRECTOR> TM_6683 Repository Name: [repo_service]
    DIRECTOR> TM_6684 Server Name: [int_service]
    DIRECTOR> TM_6686 Folder: [SDE_ORA11510_Adaptor]
    DIRECTOR> TM_6685 Workflow: [SDE_ORA_CodeDimension_Gl_Account_Segments] Run Instance Name: [] Run Id: [17]
    DIRECTOR> TM_6101 Mapping name: SDE_ORA_CodeDimension_GL_Account_Segments [version 1].
    DIRECTOR> TM_6963 Pre 85 Timestamp Compatibility is Enabled
    DIRECTOR> TM_6964 Date format for the Session is [MM/DD/YYYY HH24:MI:SS]
    DIRECTOR> TM_6827 [C:\Informatica\PowerCenter8.6.1\server\infa_shared\Storage] will be used as storage directory for session [SDE_ORA_CodeDimension_Gl_Account_Segments].
    DIRECTOR> CMN_1802 Session recovery cache initialization is complete.
    DIRECTOR> TM_6703 Session [SDE_ORA_CodeDimension_Gl_Account_Segments] is run by 32-bit Integration Service [node01_ASG596138], version [8.6.1], build [1218].
    MANAGER> PETL_24058 Running Partition Group [1].
    MANAGER> PETL_24000 Parallel Pipeline Engine initializing.
    MANAGER> PETL_24001 Parallel Pipeline Engine running.
    MANAGER> PETL_24003 Initializing session run.
    MAPPING> CMN_1569 Server Mode: [UNICODE]
    MAPPING> CMN_1570 Server Code page: [MS Windows Latin 1 (ANSI), superset of Latin1]
    MAPPING> TM_6151 The session sort order is [Binary].
    MAPPING> TM_6185 Warning. Code page validation is disabled in this session.
    MAPPING> TM_6156 Using low precision processing.
    MAPPING> TM_6180 Deadlock retry logic will not be implemented.
    MAPPING> TM_6307 DTM error log disabled.
    MAPPING> TE_7022 TShmWriter: Initialized
    MAPPING> DBG_21075 Connecting to database [orcl], user [DAC_REP]
    MAPPING> CMN_1716 Lookup [mplt_ADI_Codes.Lkp_Master_Map] uses database connection [Relational:DataWarehouse] in code page [MS Windows Latin 1 (ANSI), superset of Latin1]
    MAPPING> CMN_1716 Lookup [mplt_ADI_Codes.Lkp_Master_Code] uses database connection [Relational:DataWarehouse] in code page [MS Windows Latin 1 (ANSI), superset of Latin1]
    MAPPING> CMN_1716 Lookup [mplt_ADI_Codes.Lkp_W_CODE_D] uses database connection [Relational:DataWarehouse] in code page [MS Windows Latin 1 (ANSI), superset of Latin1]
    MAPPING> TM_6007 DTM initialized successfully for session [SDE_ORA_CodeDimension_Gl_Account_Segments]
    DIRECTOR> PETL_24033 All DTM Connection Info: [<NONE>].
    MANAGER> PETL_24004 Starting pre-session tasks. : (Wed Nov 18 02:49:14 2009)
    MANAGER> PETL_24027 Pre-session task completed successfully. : (Wed Nov 18 02:49:14 2009)
    DIRECTOR> PETL_24006 Starting data movement.
    MAPPING> TM_6660 Total Buffer Pool size is 32000000 bytes and Block size is 128000 bytes.
    READER_1_1_1> DBG_21438 Reader: Source is [asgdev], user [APPS]
    READER_1_1_1> BLKR_16051 Source database connection [ORA_11_5_10] code page: [MS Windows Latin 1 (ANSI), superset of Latin1]
    READER_1_1_1> BLKR_16003 Initialization completed successfully.
    WRITER_1_*_1> WRT_8147 Writer: Target is database [orcl], user [DAC_REP], bulk mode [OFF]
    WRITER_1_*_1> WRT_8221 Target database connection [DataWarehouse] code page: [MS Windows Latin 1 (ANSI), superset of Latin1]
    WRITER_1_*_1> WRT_8124 Target Table W_CODE_D :SQL INSERT statement:
    INSERT INTO W_CODE_D(DATASOURCE_NUM_ID,SOURCE_CODE,SOURCE_CODE_1,SOURCE_CODE_2,SOURCE_CODE_3,SOURCE_NAME_1,SOURCE_NAME_2,CATEGORY,LANGUAGE_CODE,MASTER_DATASOURCE_NUM_ID,MASTER_CODE,MASTER_VALUE,W_INSERT_DT,W_UPDATE_DT,TENANT_ID) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    WRITER_1_*_1> WRT_8124 Target Table W_CODE_D :SQL UPDATE statement:
    UPDATE W_CODE_D SET SOURCE_CODE_1 = ?, SOURCE_CODE_2 = ?, SOURCE_CODE_3 = ?, SOURCE_NAME_1 = ?, SOURCE_NAME_2 = ?, MASTER_DATASOURCE_NUM_ID = ?, MASTER_CODE = ?, MASTER_VALUE = ?, W_INSERT_DT = ?, W_UPDATE_DT = ?, TENANT_ID = ? WHERE DATASOURCE_NUM_ID = ? AND SOURCE_CODE = ? AND CATEGORY = ? AND LANGUAGE_CODE = ?
    WRITER_1_*_1> WRT_8124 Target Table W_CODE_D :SQL DELETE statement:
    DELETE FROM W_CODE_D WHERE DATASOURCE_NUM_ID = ? AND SOURCE_CODE = ? AND CATEGORY = ? AND LANGUAGE_CODE = ?
    WRITER_1_*_1> WRT_8270 Target connection group #1 consists of target(s) [W_CODE_D]
    WRITER_1_*_1> WRT_8003 Writer initialization complete.
    READER_1_1_1> BLKR_16007 Reader run started.
    WRITER_1_*_1> WRT_8005 Writer run started.
    WRITER_1_*_1> WRT_8158
    Load section
    *****START LOAD SESSION*****
    Load Start Time: Wed Nov 18 02:49:16 2009
    Target tables:
    W_CODE_D
    READER_1_1_1> RR_4029 SQ Instance [mplt_BC_ORA_Codes_GL_Account_Segments.Sq_Fnd_Flex_Values] User specified SQL Query [SELECT
    FND_FLEX_VALUES.FLEX_VALUE_SET_ID,
    FND_FLEX_VALUES.FLEX_VALUE,
    MAX(FND_FLEX_VALUES_TL.DESCRIPTION),
    FND_ID_FLEX_SEGMENTS.ID_FLEX_NUM,
    FND_ID_FLEX_SEGMENTS.APPLICATION_COLUMN_NAME
    FROM
    FND_FLEX_VALUES,
    FND_FLEX_VALUES_TL,
    FND_ID_FLEX_SEGMENTS,
    FND_SEGMENT_ATTRIBUTE_VALUES
    WHERE
    FND_FLEX_VALUES.FLEX_VALUE_ID = FND_FLEX_VALUES_TL.FLEX_VALUE_ID AND FND_FLEX_VALUES_TL.LANGUAGE ='US' AND
    FND_ID_FLEX_SEGMENTS.FLEX_VALUE_SET_ID =FND_FLEX_VALUES.FLEX_VALUE_SET_ID AND
    FND_ID_FLEX_SEGMENTS.APPLICATION_ID = 101 AND
    FND_ID_FLEX_SEGMENTS.ID_FLEX_CODE ='GL#' AND
    FND_ID_FLEX_SEGMENTS.ID_FLEX_NUM =FND_SEGMENT_ATTRIBUTE_VALUES.ID_FLEX_NUM AND
    FND_SEGMENT_ATTRIBUTE_VALUES.APPLICATION_ID =101 AND
    FND_SEGMENT_ATTRIBUTE_VALUES.ID_FLEX_CODE = 'GL#' AND
    FND_ID_FLEX_SEGMENTS.APPLICATION_COLUMN_NAME=FND_SEGMENT_ATTRIBUTE_VALUES.APPLICATION_COLUMN_NAME AND
    FND_SEGMENT_ATTRIBUTE_VALUES.ATTRIBUTE_VALUE ='Y'
    GROUP BY
    FND_FLEX_VALUES.FLEX_VALUE_SET_ID,
    FND_FLEX_VALUES.FLEX_VALUE,
    FND_ID_FLEX_SEGMENTS.ID_FLEX_NUM,
    FND_ID_FLEX_SEGMENTS.APPLICATION_COLUMN_NAME]
    READER_1_1_1> RR_4049 SQL Query issued to database : (Wed Nov 18 02:49:17 2009)
    READER_1_1_1> RR_4050 First row returned from database to reader : (Wed Nov 18 02:49:17 2009)
    LKPDP_3> DBG_21312 Lookup Transformation [mplt_ADI_Codes.Lkp_W_CODE_D]: Lookup override sql to create cache: SELECT W_CODE_D.SOURCE_NAME_1 AS SOURCE_NAME_1, W_CODE_D.SOURCE_NAME_2 AS SOURCE_NAME_2, W_CODE_D.MASTER_DATASOURCE_NUM_ID AS MASTER_DATASOURCE_NUM_ID, W_CODE_D.MASTER_CODE AS MASTER_CODE, W_CODE_D.MASTER_VALUE AS MASTER_VALUE, W_CODE_D.W_INSERT_DT AS W_INSERT_DT, W_CODE_D.TENANT_ID AS TENANT_ID, W_CODE_D.DATASOURCE_NUM_ID AS DATASOURCE_NUM_ID, W_CODE_D.SOURCE_CODE AS SOURCE_CODE, W_CODE_D.CATEGORY AS CATEGORY, W_CODE_D.LANGUAGE_CODE AS LANGUAGE_CODE FROM W_CODE_D
    WHERE
    W_CODE_D.CATEGORY IN () ORDER BY DATASOURCE_NUM_ID,SOURCE_CODE,CATEGORY,LANGUAGE_CODE,SOURCE_NAME_1,SOURCE_NAME_2,MASTER_DATASOURCE_NUM_ID,MASTER_CODE,MASTER_VALUE,W_INSERT_DT,TENANT_ID
    LKPDP_3> TE_7212 Increasing [Index Cache] size for transformation [mplt_ADI_Codes.Lkp_W_CODE_D] from [1000000] to [4734976].
    LKPDP_3> TE_7212 Increasing [Data Cache] size for transformation [mplt_ADI_Codes.Lkp_W_CODE_D] from [2000000] to [2007040].
    READER_1_1_1> BLKR_16019 Read [625] rows, read [0] error rows for source table [FND_ID_FLEX_SEGMENTS] instance name [mplt_BC_ORA_Codes_GL_Account_Segments.FND_ID_FLEX_SEGMENTS]
    READER_1_1_1> BLKR_16008 Reader run completed.
    LKPDP_3> TM_6660 Total Buffer Pool size is 609824 bytes and Block size is 65536 bytes.
    LKPDP_3:READER_1_1> DBG_21438 Reader: Source is [orcl], user [DAC_REP]
    LKPDP_3:READER_1_1> BLKR_16051 Source database connection [DataWarehouse] code page: [MS Windows Latin 1 (ANSI), superset of Latin1]
    LKPDP_3:READER_1_1> BLKR_16003 Initialization completed successfully.
    LKPDP_3:READER_1_1> BLKR_16007 Reader run started.
    LKPDP_3:READER_1_1> RR_4049 SQL Query issued to database : (Wed Nov 18 02:49:18 2009)
    LKPDP_3:READER_1_1> CMN_1761 Timestamp Event: [Wed Nov 18 02:49:18 2009]
    LKPDP_3:READER_1_1> RR_4035 SQL Error [
    ORA-00936: missing expression
    Could you please suggest what the issue might be and how it can be fixed?
    Many thanks,
    Kiran

    I have continued the related details in the following thread:
    Mapping Parameter $$CATEGORY not included in the parameter file (7.9.6.1)
    Apologies for the inconvenience.
    Thanks,
    Kiran

  • Scheduling error causing - ORA-00942: table or view does not exist

    Hello
    I have the following problem. I created a scenario that is supposed to run every hour. It has an interface that moves a number of records from one table to another (the number is not fixed; it changes). The problem arises when the amount of data to move is so big that the hour between executions is not enough, and the scenario is still running when a second execution of the same scenario starts as scheduled. When the second process tries to run the loading step, it fails with ODI-1228 Caused By: java.sql.BatchUpdateException: ORA-00942: table or view does not exist. The scenario fails and logs this error; I think it is due to the staging table being locked by the first execution.
    I don't want to change the interval between executions because it is usually enough. I just want to avoid starting the scenario when another execution of the same scenario is still running. Is there any way to do this?
    Thanks
    LJ

    Hello
    Thanks for your answer. I already have the "Interval between repetitions" property set to 1 hour, but when one execution takes more than one hour the scheduler starts another, and then I have two processes accessing the same table; that is when I get a concurrency error on the staging table.
    How can I ensure that I won't have two or more executions of the same scenario running at the same time?
    Thanks,
    LJ
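
    One database-side way to serialize runs, sketched below on the assumption that both executions hit the same Oracle instance: give the scenario a first step that takes a named exclusive lock and aborts immediately if another run still holds it. DBMS_LOCK is a standard Oracle package (EXECUTE on it must be granted by a DBA); the lock name and error number below are arbitrary placeholders. The lock is released automatically when the session ends, or explicitly with DBMS_LOCK.RELEASE in a final step.

    DECLARE
        l_handle VARCHAR2(128);
        l_status INTEGER;
    BEGIN
        -- Map an arbitrary lock name to a lock handle.
        DBMS_LOCK.ALLOCATE_UNIQUE( 'LJ_SCENARIO_LOCK', l_handle );
        -- timeout => 0: fail at once instead of queueing behind the running execution.
        l_status := DBMS_LOCK.REQUEST( l_handle,
                                       lockmode          => DBMS_LOCK.X_MODE,
                                       timeout           => 0,
                                       release_on_commit => FALSE );
        IF l_status <> 0 THEN
            RAISE_APPLICATION_ERROR( -20001,
                'Previous execution still running; aborting this run.' );
        END IF;
    END;
    /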

  • Error while activating DSO after loading from another DSO..Production issue

    Hello Gurus,
    I got an error while activating DSO2 after loading it from another DSO, DSO1.
    The DSO1 load and activation were successful, and the load from DSO1 to DSO2 was fine, but I got an error while activating:
      >Error when assigning SID: Action VAL_SID_CONVERT table ZWEEK_NUM
      >Value '09' of characteristic ZWEEK_NUM is not a number with 000004 spaces
    This thread was posted after searching for related posts on SDN.
    I tried function module RSODS_CHECK_RSODSACTUPDTYPE, but it is not available, although I used it in BW 3.10.
    Is there any similar function module in BI 7.0? Please advise.
    We are using SAP NetWeaver BI 7.0
    Release- 700    Service level- 017
    Thanks in advance
    Sandy

    Solved the issue:
    it was due to wrong data in the related field;
    it must be "0009 ".
    Thanks for your support. Closing the ticket.
    Regards
    Sandy

  • Error ODS activation - sql error when accessing a table.

    Hi,
    Sometimes an error occurs during ODS activation. I have a process chain, and when the second packet is loaded an error occurs. In the monitor:
    -RSMPC 128, datapacket 3 is wrong, with status number 9
    -RSMPC 131
    -RSDRO 108 - communcation error (sql error when accessing a table)
    In sm21>
    -sql error when accessing a table
    -The exception, which is assigned to the class 'CX_SY_OPEN_SQL_DB', was
    neither caught nor passed along using a RAISING clause, in the procedure "UPDATE_ATAB" "(FORM)".
    Since the caller of the procedure could not have expected this exception
    to occur, the running program was terminated.
      The reason for the exception is:                                             
      The database system recognized that your last operation on the database      
      would have led to a deadlock.                                                
      Therefore, your transaction was rolled back                                  
      to avoid this.                                                                       
      ORACLE always terminates any transaction that would result in deadlock.      
      The other transactions involved in this potential deadlock                   
      are not affected by the termination.
    I have BW 3.5.
    Thank You very much.

    There are a few different scenarios I can think of where this might come up, all of which involve something resulting in parallel (concurrent) processes:
    Loading packets in parallel - that is, there are X number of processes loading packets concurrently. This could be set in your IMG settings system-wide or in the InfoPackage for just this datasource. You seem to indicate that you don't have this.
    Database parallel processing - RSADMIN - ORA_PARALLEL_DEGREE (there was a different RSADMIN parameter for older versions; I forget which SP the change came with).
    You have multiple InfoPackages for the datasource, each loading what should be a different range of data, and they run at the same time.
    You could be loading from two different datasources to the ODS at the same time.
    If any of these are true, I would look at bumping the INITRANS setting up. Your DBA will probably need to do this for the table and its indices. There is a Note - 831234 - that allows you to create a parameter in RSADMIN that specifies an INITRANS value (e.g. 20) rather than using the default; the ODS would need to be activated to pick up this new setting for the table to be altered. A sketch of the DBA-side change follows this reply.
    You could also look at the Processing settings for the InfoPackage and change to PSA first, then target to see if that helps. 
    Or if you are loading from two different datasources at the same time, you might adjust your schedule so that doesn't happen.
    Pizzaman
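
    For reference, a sketch of the manual DBA-side change; the table and index names are placeholders for the ODS active table and its indexes, not names taken from this thread. A new INITRANS value applies only to blocks formatted after the change; the commented MOVE/REBUILD variants apply it to existing blocks as well (at the cost of downtime, and a MOVE leaves indexes unusable until rebuilt).

    -- Placeholders: substitute the actual ODS active table and its indexes.
    ALTER TABLE "/BIC/AZODS0100" INITRANS 20;
    ALTER INDEX "/BIC/AZODS0100~0" INITRANS 20;
    -- To apply throughout existing blocks:
    -- ALTER TABLE "/BIC/AZODS0100" MOVE INITRANS 20;
    -- ALTER INDEX "/BIC/AZODS0100~0" REBUILD INITRANS 20;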

  • 3-1674105521 Multiple Paths error while using Bridge Table

    https://support.us.oracle.com/oip/faces/secure/srm/srview/SRViewStandalone.jspx?sr=3-1674105521
    Customer Smiths Medical International Limited
    Description: Multiple Paths error while using Bridge Table
    1. I have an urgent customer who encountered a design issue; the customer was trying to add 3 logical joins between SDI_GPOUP_MEMBERSHIP and these 3 tables (FACT_HOSPITAL_FINANCE_DTLS, FACT_HOSPITAL_BEDS_UTILZN and FACT_HOSPITAL_ATRIBUTES).
    2. They found out that by adding these 3 joins, they ended up with a circular error:
    [nQSError: 15001] Could not load navigation space for subject area GXODS.
    [nQSError: 15009] Multiple paths exist to table DIM_SDI_CUSTOMER_DEMOGRAPHICS. Circular logical schemas are not supported.
    In response to this circular error, the developer was able to bypass the error using aliases, but this is not desired by the client.
    3. They want to know how to avoid this error entirely without using alias tables, and would like a suggested way to resolve the circular join (multiple paths) error.
    Appreciated if someone can give some pointers or suggestions, as the customer is on a stiff deadline.
    Thanks
    Teik

    The strange thing compared to your output is that I get an error when I have a table prefix in the query block:
    Connected to: Oracle Database 11g Enterprise Edition Release 11.2.0.4.0 - 64bit Production
    With the Partitioning, OLAP, Data Mining and Real Application Testing options
    Master table "SYSTEM"."SYS_IMPORT_FULL_01" successfully loaded/unloaded
    Starting "SYSTEM"."SYS_IMPORT_FULL_01":  system/******** DUMPFILE=TMP1.dmp LOGFILE=imp.log PARALLEL=8 QUERY=SYSADM.TMP1:"WHERE TMP1.A = 2" REMAP_TABLE=SYSADM.TMP1:TMP3 CONTENT=DATA_ONLY
    Processing object type TABLE_EXPORT/TABLE/TABLE_DATA
    ORA-31693: Table data object "SYSADM"."TMP3" failed to load/unload and is being skipped due to error:
    ORA-38500: Unsupported operation: Oracle XML DB not present
    Job "SYSTEM"."SYS_IMPORT_FULL_01" completed with 1 error(s) at Fri Dec 13 10:39:11 2013 elapsed 0 00:00:03
    And if I remove it, it works:
    Connected to: Oracle Database 11g Enterprise Edition Release 11.2.0.4.0 - 64bit Production
    With the Partitioning, OLAP, Data Mining and Real Application Testing options
    Master table "SYSTEM"."SYS_IMPORT_FULL_01" successfully loaded/unloaded
    Starting "SYSTEM"."SYS_IMPORT_FULL_01":  system/******** DUMPFILE=TMP1.dmp LOGFILE=imp.log PARALLEL=8 QUERY=SYSADM.TMP1:"WHERE A = 2" REMAP_TABLE=SYSADM.TMP1:TMP3 CONTENT=DATA_ONLY
    Processing object type TABLE_EXPORT/TABLE/TABLE_DATA
    . . imported "SYSADM"."TMP3"                             5.406 KB       1 out of 2 rows
    Job "SYSTEM"."SYS_IMPORT_FULL_01" successfully completed at Fri Dec 13 10:36:50 2013 elapsed 0 00:00:01
    Nicolas.
    PS: as you can see, I'm on 11.2.0.4, I do not have 11.2.0.1 that you seem to use.

  • "Error in function: Add Table not found [131-183]" - TDS Addon Version 3.16

    While loading TDS Addon Version 3.16, I am getting the error below:
    "Error in function: Add Table not found [131-183]"
    SAP B1 2005B, PL 39.
    It also shows a message that it was installed successfully, but when I try to access any of the screens the above message pops up.
    This is very urgent; kindly help me out.

    Dear,
    The addon is a third-party addon developed by CitiXSys, so support is provided by the partner who developed it.
    Going to the vendor CitiXSys for further support will get you a quick reply.
    Regards
    Apple

  • An error prevented the view from loading

    I'm using SQL Server 2012 Express and I'm building a report with one table. I dragged and dropped two dataset elements into the table, which has two columns (Product Name and Product Description). Both dataset elements have
    a data type of varchar(50) in SQL Server 2012 Express. The table with those two elements is the only data I have in Design view. When I click on Preview, the Preview screen states 'An error prevented the view from loading.'. This is difficult
    to diagnose, because the Error List shows '0 Errors', '0 Warnings', and '0 Messages'. I would like help understanding whether there is another error log I can check, or whether there are basic SQL Server 2012 Express Reporting
    Services debugging techniques. Please help. Thank you.
    Additionally, when I run the query in SQL Server 2012 Express and in the Reporting Services Query Designer, the query results are correct.
    Also, when I click the start debugging button, the report displays and I can export it to PDF; however, the Preview still displays the error.

    Well, funny stuff.  I shut the solution down yesterday, then I booted my computer today and started the solution AND JUST LIKE MAGIC....it works.  I can't explain it.  Persistence pays, that's the answer sometimes.

  • Invalid Number Error for Decimal Field While Loading Data

    I am loading a delimited text file using SQL*Loader; however, I am receiving an error on my decimal fields. When a decimal field has only leading zeros before the decimal point, I receive an invalid number error. The examples below clarify:
    i.e.) 00000000.30 [Invalid number]
    i.e.) 00046567.45 [Valid number]
    i.e.) 00000001.00 [Valid number]
    I've tried setting the precision/scale in the table and tried declaring it a decimal field instead of number; none of these methods fixed the issue. I would really appreciate any help.
      POLICY_NUMBER             NUMBER,
      EFFECTIVE_DATE     DATE "YYYYMMDD"          NULLIF EFFECTIVE_DATE = '',
      TRANSACTION_DATE     DATE "YYYYMMDD",
      TRANSACTION_AMOUNT     DECIMAL EXTERNAL,   -- Tried TRANSACTION_AMOUNT DECIMAL EXTERNAL (10)  & TRANSACTION_AMOUNT NUMBER
      MF_TRX_CODE          NUMBER,
      USER_ID          CHAR,
      GROUP_NUMBER          NUMBER,
      EXPIRATION_DATE     DATE "YYYYMMDD"          NULLIF EXPIRATION_DATE = '',
      BILL_NUMBER          NUMBER,
    Any help is greatly appreciated. Thanks beforehand.

    Hi,
    Connected to Oracle Database 10g Express Edition Release 10.2.0.1.0
    Connected as hr
    SQL> SELECT * FROM TEST;
    TRANSACTION_AMOUNT
    SQL> SELECT * FROM TEST;
    TRANSACTION_AMOUNT
              11000,00
                293,37
               2000,00
               1134,32
                  0,30
    SQL>
    Between the selects I loaded the table with SQL*Loader using...
    Load Data
    INFILE *
    APPEND
    INTO TABLE TEST
    FIELDS TERMINATED BY '|'
    TRAILING NULLCOLS
      TRANSACTION_AMOUNT     DECIMAL EXTERNAL
    BEGINDATA
    00011000.00
    00000293.37
    00002000.00
    00001134.32
    00000000.30
    The log is:
    SQL*Loader: Release 10.2.0.1.0 - Production on Tue Dec 23 17:23:47 2008
    Copyright (c) 1982, 2005, Oracle.  All rights reserved.
    Control File:   test.ctl
    Data File:      test.ctl
      Bad File:     test.bad
      Discard File:  none specified
    (Allow all discards)
    Number to load: ALL
    Number to skip: 0
    Errors allowed: 50
    Bind array:     64 rows, maximum of 256000 bytes
    Continuation:    none specified
    Path used:      Conventional
    Table TEST, loaded from every logical record.
    Insert option in effect for this table: APPEND
    TRAILING NULLCOLS option in effect
       Column Name                  Position   Len  Term Encl Datatype
    TRANSACTION_AMOUNT                  FIRST     *   |       CHARACTER           
    Table TEST:
      5 Rows successfully loaded.
      0 Rows not loaded due to data errors.
      0 Rows not loaded because all WHEN clauses were failed.
      0 Rows not loaded because all fields were null.
    Space allocated for bind array:                  16512 bytes(64 rows)
    Read   buffer bytes: 1048576
    Total logical records skipped:          0
    Total logical records read:             5
    Total logical records rejected:         0
    Total logical records discarded:        0
    Run began on Tue Dec 23 17:23:47 2008
    Run ended on Tue Dec 23 17:23:50 2008
    Elapsed time was:     00:00:02.86
    CPU time was:         00:00:00.06
    Regards,
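
    One thing worth checking, since the output above shows comma decimal markers while the posted input uses periods: an NLS_NUMERIC_CHARACTERS mismatch between the loading session and the data can produce exactly this kind of invalid number error. A sketch of a control-file field that pins the decimal character explicitly, independent of session NLS settings (only the field name comes from the original post):

      TRANSACTION_AMOUNT DECIMAL EXTERNAL
        "to_number(:TRANSACTION_AMOUNT, '99999999D99', 'nls_numeric_characters=''.,''')"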

  • SSIS Fact table load

    Hi Experts,
    I need your valuable suggestions.
    I am loading a fact table which has 16 lookups to get the surrogate keys from dimensions. The volume is around 3 million rows.
    It is taking a lot of time to load the table (around 3-4 hours). Is it because of the large number of lookups in a single package?
    Can you please give some design tips to reduce my load time? Also, I have 6 date fields to look up against the Date dimension. Is there any way to open the connection once and reuse it? I came across the cache manager for that. Can anyone please help me understand that?
    Thanks
    mukejee

    Hi, I am now doing the same thing as you:
    loading a fact table which has 14 lookups to get the surrogate keys from dimensions, but I encounter tough errors when executing the fact table load. The errors are:
    [FactoryLookup [352]] Error: Row yielded no match during lookup. 
    [FactoryLookup [352]] Error: The "component "FactoryLookup" (352)" failed because error code 0xC020901E occurred, and the error row disposition on "output "Lookup Output" (354)" specifies failure on error. An error occurred on the specified object of the
    specified component. 
    [DTS.Pipeline] Error: The ProcessInput method on component "FactoryLookup" (352) failed with error code 0xC0209029. The identified component returned an error from the ProcessInput method. The error is specific to the component, but the error is fatal and
    will cause the Data Flow task to stop running. 
    [DTS.Pipeline] Error: Thread "WorkThread0" has exited with error code 0xC0209029. 
    I have no idea where it goes wrong. Could you please help me?
    Thanks a lot.
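
    The "Row yielded no match during lookup" errors above mean an incoming fact row carries a key the dimension does not have; either fix the source data, or change the lookup's error row disposition from fail-on-error to ignore/redirect for unmatched rows. A quick way to see which keys are missing, sketched as a SQL probe; the table and column names are invented for illustration:

    -- Hypothetical names: staging fact rows versus the factory dimension.
    SELECT s.factory_code
    FROM   stg_fact s
           LEFT JOIN dim_factory d
                  ON d.factory_code = s.factory_code
    WHERE  d.factory_code IS NULL
    GROUP  BY s.factory_code;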

  • Error in account determination: table T030K key LOCT MWS

    Hi,
    I have the following error when I try to load the POS IDocs to SAP: "Error in account determination: table T030K key LOCT MWS". I have already checked OB40 and everything is fine there; I also checked FTXP. If somebody has any suggestion, I would really appreciate it.

    Hi,
    This error comes when the system is unable to find the condition records for the tax code.
    Please go to the item conditions in the billing document and do the following steps:
    1) Double-click on the tax condition type and see whether the tax code has been picked up or not.
    If you cannot see the tax code, that means the condition record is not maintained in the system and the value has been entered manually.
    So maintain the condition record as per the access sequence and then release it to accounting.
    Please note that once you create the condition record in the system you can update the pricing (using "redetermine tax condition value").
    This will resolve your issue.
    Regards,
    Krishna O

  • Error in creating work table

    Hello!
    Now I am fighting a new issue in ODI 11.1.1.5. I executed an interface several times with no issues, and during the next execution the interface produced the following error:
    ODI-1228: Task SrcSet0 (Loading) fails on the target SUNOPSIS_ENGINE connection MEMORY_ENGINE.
    Caused By: java.sql.SQLException: object name already exists: C$_0Accounts in statement [create table "C$_0Accounts"
    (
         C1_PARENT     VARCHAR(80) NULL,
         C2_CHILD     VARCHAR(80) NULL,
         C3_DESCRIPTION     VARCHAR(80) NULL
    )]
         at org.hsqldb.jdbc.Util.sqlException(Unknown Source)
         at org.hsqldb.jdbc.JDBCPreparedStatement.fetchResult(Unknown Source)
         at org.hsqldb.jdbc.JDBCPreparedStatement.execute(Unknown Source)
         at oracle.odi.runtime.agent.execution.sql.SQLCommand.execute(SQLCommand.java:163)
         at oracle.odi.runtime.agent.execution.sql.SQLExecutor.execute(SQLExecutor.java:102)
         at oracle.odi.runtime.agent.execution.sql.SQLExecutor.execute(SQLExecutor.java:1)
         at oracle.odi.runtime.agent.execution.TaskExecutionHandler.handleTask(TaskExecutionHandler.java:50)
         at com.sunopsis.dwg.dbobj.SnpSessTaskSql.processTask(SnpSessTaskSql.java:2906)
         at com.sunopsis.dwg.dbobj.SnpSessTaskSql.treatTask(SnpSessTaskSql.java:2609)
         at com.sunopsis.dwg.dbobj.SnpSessStep.treatAttachedTasks(SnpSessStep.java:537)
         at com.sunopsis.dwg.dbobj.SnpSessStep.treatSessStep(SnpSessStep.java:453)
         at com.sunopsis.dwg.dbobj.SnpSession.treatSession(SnpSession.java:1740)
         at oracle.odi.runtime.agent.processor.impl.StartSessRequestProcessor$2.doAction(StartSessRequestProcessor.java:338)
         at oracle.odi.core.persistence.dwgobject.DwgObjectTemplate.execute(DwgObjectTemplate.java:214)
         at oracle.odi.runtime.agent.processor.impl.StartSessRequestProcessor.doProcessStartSessTask(StartSessRequestProcessor.java:272)
         at oracle.odi.runtime.agent.processor.impl.StartSessRequestProcessor.access$0(StartSessRequestProcessor.java:263)
         at oracle.odi.runtime.agent.processor.impl.StartSessRequestProcessor$StartSessTask.doExecute(StartSessRequestProcessor.java:822)
         at oracle.odi.runtime.agent.processor.task.AgentTask.execute(AgentTask.java:123)
         at oracle.odi.runtime.agent.support.DefaultAgentTaskExecutor$2.run(DefaultAgentTaskExecutor.java:82)
         at java.lang.Thread.run(Thread.java:662)
    Caused by: org.hsqldb.HsqlException: object name already exists: C$_0Accounts
         at org.hsqldb.error.Error.error(Unknown Source)
         at org.hsqldb.SchemaObjectSet.checkAdd(Unknown Source)
         at org.hsqldb.SchemaManager.checkSchemaObjectNotExists(Unknown Source)
         at org.hsqldb.StatementSchema.setOrCheckObjectName(Unknown Source)
         at org.hsqldb.StatementSchema.getResult(Unknown Source)
         at org.hsqldb.StatementSchema.execute(Unknown Source)
         at org.hsqldb.Session.executeCompiledStatement(Unknown Source)
         at org.hsqldb.Session.execute(Unknown Source)
         ... 19 more
    Any hints on how to resolve it will be very highly appreciated!

    Hi, John!
    Thank you so much for your hint! This issue happens with the following dependency: 1) I run an interface and it stumbles at a given step with some error; 2) I run the same interface a second time and it fails at the very first step with the error that the work table already exists.
    If I exit ODI Studio and relaunch it, the interface no longer hits the existing-work-table error. But if the interface fails at some step and I run it a second time, the error that the work table already exists is reproduced again. So every time I want to get rid of this error, I have to exit and relaunch ODI Studio.
    Is there any solution that would not force me to relaunch ODI Studio every time I have this error?
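
    A sketch of a workaround that avoids restarting the Studio, assuming the leftover C$ table is the only obstacle: drop it on the memory engine before re-running. HSQLDB accepts a trailing IF EXISTS clause, so the statement is harmless when the table is already gone. Longer term, check the knowledge module's option for deleting temporary objects even after a failed step (commonly exposed as DELETE_TEMPORARY_OBJECTS) so failed runs clean up after themselves.

    -- Run against the MEMORY_ENGINE (HSQLDB) data server, for example as a
    -- one-off ODI procedure step targeting SUNOPSIS_ENGINE.
    DROP TABLE "C$_0Accounts" IF EXISTS;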

  • Parallel Task Error resolution for Marketing Campaign Load

    Hi,
    During a Marketing campaign load, we encountered the "Parallel Task Error".
    Description of error:
    Error/Warning: Error invoking service 'Mktg Data Load Service', method 'CampaignLoad' at step 'Load Segment Tree Cells/Segments'. (SBL-BPR-00162)
    One or more of the parallel tasks may have encountered an error. Any queued tasks that were scheduled to follow the failed task have been cancelled. (SBL-MKT-00453)
    Further investigation revealed that the unique constraint on the S_CAMP_CON table was being violated. Per a suggestion in an Oracle Support SR, we were asked to change the value of the 'Token Number' field (in the load file format) to a randomly generated number using the formula:
    CAST((RCOUNT(1) + 001) as varchar(40)) || '001')
    However, the issue arises when we assign more than one node of a segment tree to a campaign, because the token number would then repeat between records of the two nodes.
    Hence, to overcome this error, instead of defaulting the Token Number to a system-generated field or using the above function, we made it default to a value that is unique per record (for example, the account id) coming from a database column. This way we made sure the token number is unique for all the records being loaded by the campaign.
    This resolved the error; however, we would like to confirm whether this is the right approach. Can someone let us know if there is any other solution to avoid the error?
    Thanks in advance

