Problem while having a large set of data to work on!

Hi,
I am facing a big problem processing a large set of data. I have a requirement in which I'm supposed to generate a report.
I have a table and a MView, which I have joined to reduce the number of records to process. The MView holds 200,00,000 records while the table 18,00,000. Based on join conditions and the where clause I'm able to break down the useful data to approx 4,50,000 records, and I'm getting 8 of my report columns from this join. I'm dumping these records into the table from where I'll be generating the report by spooling.
Below is the block which takes 12mins to insert into the report table MY_ACCOUNT_PHOTON_DUMP:
begin
dbms_output.put_line(to_char(sysdate,'hh24:mi:ss'));
insert into MY_ACCOUNT_PHOTON_DUMP --- Report table
(SUBSCR_NO, ACCOUNT_NO, AREA_CODE, DEL_NO, CIRCLE, REGISTRATION_DT, EMAIL_ID, ALT_CNTCT_NO)
select crm.SUBSCR_NO, crm.ACCOUNT_NO, crm.AREA_CODE, crm.DEL_NO, crm.CIRCLE_ID,
aa.CREATED_DATE, aa.EMAIL_ID, aa.ALTERNATE_CONTACT
from MV_CRM_SUBS_DTLS crm, --- MView
(select /*+ ALL_ROWS */ A.ALTERNATE_CONTACT, A.CREATED_DATE, A.EMAIL_ID, B.SUBSCR_NO
from MCCI_PROFILE_DTLS a, MCCI_PROFILE_SUBSCR_DTLS b
where A.PROFILE_ID = B.PROFILE_ID
and B.ACE_STATUS = 'N'
) aa --- Join of two tables giviing me 18,00,000 recs
where crm.SUBSCR_NO = aa.SUBSCR_NO
and crm.SRVC_TYPE_ID = '125'
and crm.END_DT IS NULL;
INTERNET_METER_TABLE_PROC_1('MCCIPRD','MY_ACCOUNT_PHOTON_DUMP'); --- calling procedure to analyze the report table
COMMIT;
dbms_output.put_line(to_char(sysdate,'hh24:mi:ss'));
end; --- 12 min 04 secFor the rest of the 13 columns required i am running a block which has a FOR UPDATE cursor on the report table:
declare
    -- Second pass: row-by-row enrichment of MY_ACCOUNT_PHOTON_DUMP with the
    -- remaining 13 report columns, most fetched over the MCCI_TO_PRD591
    -- database link.
    -- NOTE(review): this per-row loop (many remote lookups per record, over
    -- ~4,50,000 records) is the dominant cost of the >6 hr runtime; each
    -- lookup keyed on ACCOUNT_NO / SUBSCR_NO / DEL_NO could be pre-aggregated
    -- once per run and applied with a single set-based MERGE instead.
    cursor cur is
        select SUBSCR_NO, ACCOUNT_NO, AREA_CODE, DEL_NO,
               CIRCLE, REGISTRATION_DT, EMAIL_ID, ALT_CNTCT_NO
        from MCCIPRD.MY_ACCOUNT_PHOTON_DUMP --where ACCOUNT_NO = 901237064
        for update of
            MRKT_SEGMNT, AON, ONLINE_PAY, PAID_AMNT, E_BILL, ECS, BILLED_AMNT,
            SRVC_TAX, BILL_PLAN, USAGE_IN_MB, USAGE_IN_MIN, NO_OF_LOGIN, PHOTON_TYPE;
    v_aon          VARCHAR2(10)  := NULL;  -- age on network ('NA' when lookup fails)
    v_online_pay   VARCHAR2(10)  := NULL;  -- 'YES'/'NO': paid online this month
    v_ebill        VARCHAR2(10)  := NULL;  -- 'YES'/'NO': bill preference is email
    v_mkt_sgmnt    VARCHAR2(50)  := NULL;  -- market segment description ('NA' on error)
    v_phtn_type    VARCHAR2(50)  := NULL;  -- 'PHOTON PLUS' / 'PHOTON WHIZ'
    v_login        NUMBER(10)    := 0;     -- login count for the reporting month
    v_paid_amnt    VARCHAR2(50)  := NULL;  -- amount paid this month ('NA' on error)
    v_ecs          VARCHAR2(10)  := NULL;  -- 'YES'/'NO': ECS registration
    v_bill_plan    VARCHAR2(100) := NULL;  -- latest statement package name
    v_billed_amnt  VARCHAR2(10)  := NULL;  -- latest statement billed amount
    v_srvc_tx_amnt VARCHAR2(10)  := NULL;  -- latest statement service tax
    v_usg_mb       NUMBER(10)    := NULL;  -- latest statement usage in MB
    v_usg_min      NUMBER(10)    := NULL;  -- latest statement usage in minutes
begin
    dbms_output.put_line(to_char(sysdate,'hh24:mi:ss'));
    for rec in cur loop
        -- Age-on-network from the remote function; default to 'NA' on any error.
        begin
            select apps.TTL_GET_DEL_AON@MCCI_TO_PRD591(rec.ACCOUNT_NO, rec.DEL_NO, rec.CIRCLE)
            into v_aon from dual;
        exception
            when others then
                v_aon := 'NA';
        end;
        -- Did the account make an online payment this calendar month?
        -- BUG FIX: the original statement was missing two closing parentheses
        -- after LOOKUP_TYPE='TTL_ONLINE_PAYMENT' and could not compile.
        -- NOTE(review): the DESCRIPTION_TEXT filter is reconstructed as a
        -- condition on the TTL_BMF_TRANS_DESCR subquery -- confirm this
        -- placement matches the original intent.
        SELECT DECODE(COUNT(*),0,'NO','YES') into v_online_pay
        FROM TTL_DESCRIPTIONS@MCCI_TO_PRD591
        WHERE DESCRIPTION_CODE IN
              (SELECT DESCRIPTION_CODE
               FROM TTL_BMF_TRANS_DESCR@MCCI_TO_PRD591
               WHERE BMF_TRANS_TYPE IN
                     (SELECT BMF_TRANS_TYPE
                      FROM TTL_BMF@MCCI_TO_PRD591
                      WHERE ACCOUNT_NO = rec.ACCOUNT_NO
                      AND POST_DATE BETWEEN
                          TO_DATE('01-'||TO_CHAR(SYSDATE,'MM-YYYY'),'DD-MM-YYYY') AND SYSDATE)
               AND DESCRIPTION_TEXT IN
                     (select DESCRIPTION from fnd_lookup_values@MCCI_TO_PRD591
                      where LOOKUP_TYPE='TTL_ONLINE_PAYMENT'));
        -- Customer prefers e-mail bills?
        SELECT decode(count(*),0,'NO','YES') into v_ebill
        FROM TTL_CUST_ADD_DTLS@MCCI_TO_PRD591
        WHERE CUST_ACCT_NBR = rec.ACCOUNT_NO
        AND UPPER(CUSTOMER_PREF_MODE) ='EMAIL';
        -- Market segment description; 'NA' when no match.
        -- NOTE(review): WHEN OTHERS also hides TOO_MANY_ROWS here -- confirm
        -- the join is one-row-per-account before trusting 'NA'.
        begin
            select ACC_SUB_CAT_DESC into v_mkt_sgmnt
            from ttl_cust_dtls@MCCI_TO_PRD591 a, TTL_ACCOUNT_CATEGORIES@MCCI_TO_PRD591 b
            where a.CUST_ACCT_NBR = rec.ACCOUNT_NO
            and a.market_code = b.ACC_SUB_CAT;
        exception
            when others then
                v_mkt_sgmnt := 'NA';
        end;
        -- Total amount paid so far this calendar month.
        begin
            select nvl(sum(TRANS_AMOUNT),0) into v_paid_amnt
            from ttl_bmf@MCCI_TO_PRD591
            where account_no = rec.ACCOUNT_NO
            AND POST_DATE
                BETWEEN TO_DATE('01-'||TO_CHAR(SYSDATE,'MM-YYYY'),'DD-MM-YYYY')
                AND SYSDATE;
        exception
            when others then
                v_paid_amnt := 'NA';
        end;
        -- ECS (auto-debit) registration?
        SELECT decode(count(1),0,'NO','YES') into v_ecs
        from ts.Billdesk_Registration_MV@MCCI_TO_PRD591
        where ACCOUNT_NO = rec.ACCOUNT_NO
        and UPPER(REGISTRATION_TYPE ) = 'ECS';
        -- Photon type derived from active HSIA feature products.
        -- NOTE(review): "prouduct_type_id" looks misspelled but may be the
        -- real column name in ttl_product_mstr -- verify before renaming.
        SELECT decode(COUNT(*),0,'PHOTON WHIZ','PHOTON PLUS') into v_phtn_type
        FROM ts.ttl_cust_ord_prdt_dtls@MCCI_TO_PRD591 A, ttl_product_mstr@MCCI_TO_PRD591 b
        WHERE A.SUBSCRIBER_NBR = rec.SUBSCR_NO
        and (A.prdt_disconnection_date IS NULL OR A.prdt_disconnection_date > SYSDATE )
        AND A.prdt_disc_flag = 'N'
        AND A.prdt_nbr = b.product_number
        AND A.prdt_type_id = b.prouduct_type_id
        AND b.first_level LIKE 'Feature%'
        AND UPPER (b.product_desc) LIKE '%HSIA%';
        -- MyAccount logins in the reporting month; sysdate-5 keeps runs in the
        -- first days of a month counting against the previous month.
        SELECT count(1) into v_login
        FROM MCCIPRD.MYACCOUNT_SESSION_INFO a
        WHERE (A.DEL_NO = rec.DEL_NO or A.DEL_NO = ltrim(rec.AREA_CODE,'0')||rec.DEL_NO)
        AND to_char(A.LOGIN_TIME,'Mon-YYYY') = to_char(sysdate-5,'Mon-YYYY');
        -- Latest statement for the line: plan name, amounts and usage.
        -- BUG FIX: the original inline view was missing the comma between
        -- USAGE_IN_MIN and nvl(BILLED_AMOUNT,'0') and could not compile.
        -- NOTE(review): rank() can return several rk=1 rows on a tied
        -- STATEMENT_DATE (TOO_MANY_ROWS, silently mapped to defaults below);
        -- row_number() would guarantee a single row.
        begin
            select PACKAGE_NAME, BILLED_AMOUNT, SERVICE_TAX_AMOUNT, USAGE_IN_MB, USAGE_IN_MIN
            into v_bill_plan, v_billed_amnt, v_srvc_tx_amnt, v_usg_mb, v_usg_min
            from (select rank() over(order by STATEMENT_DATE desc) rk,
                         PACKAGE_NAME, USAGE_IN_MB, USAGE_IN_MIN,
                         nvl(BILLED_AMOUNT,'0') BILLED_AMOUNT, NVL(SRVC_TAX_AMNT,'0') SERVICE_TAX_AMOUNT
                  from MCCIPRD.MCCI_IM_BILLED_DATA
                  where (DEL_NUM = rec.DEL_NO or DEL_NUM = ltrim(rec.AREA_CODE,'0')||rec.DEL_NO)
                  and STATEMENT_DATE like '%'||to_char(SYSDATE,'Mon-YY')||'%')
            where rk = 1;
        exception
            when others then
                v_bill_plan    := 'NA';
                v_billed_amnt  := '0';
                v_srvc_tx_amnt := '0';
                v_usg_mb       := 0;
                v_usg_min      := 0;
        end;
        -- UPDATE THE DUMP TABLE --
        update MCCIPRD.MY_ACCOUNT_PHOTON_DUMP
        set MRKT_SEGMNT = v_mkt_sgmnt, AON = v_aon, ONLINE_PAY = v_online_pay, PAID_AMNT = v_paid_amnt,
            E_BILL = v_ebill, ECS = v_ecs, BILLED_AMNT = v_billed_amnt, SRVC_TAX = v_srvc_tx_amnt,
            BILL_PLAN = v_bill_plan, USAGE_IN_MB = v_usg_mb, USAGE_IN_MIN = v_usg_min, NO_OF_LOGIN = v_login,
            PHOTON_TYPE = v_phtn_type
        where current of cur;
    end loop;
    COMMIT;
    dbms_output.put_line(to_char(sysdate,'hh24:mi:ss'));
exception
    when others then
        -- NOTE(review): printing and swallowing the error lets a failed run
        -- look successful; consider re-raising with RAISE after logging.
        dbms_output.put_line(SQLCODE||'::'||SQLERRM);
end;

The report takes >6hrs. I know that most of the SELECT queries have ACCOUNT_NO as WHERE clause and can be joined, but when i joining few of these blocks with the initial INSERT query it was no better.
The individual queries within the cursor loop don't take more than 0.3 sec to execute.
I'm using the FOR UPDATE as I know that the report table is being used solely for this purpose.
Can somebody please help me with this? I'm in desperate need of good advice here.
Thanks!!
Edited by: user11089213 on Aug 30, 2011 12:01 AM

Hi,
Below is the explain plan for the original query:
-- Original report query (explain plan follows below).
-- BUG FIX: removed the stray ">" quote-wrap artifact before crm.CIRCLE_ID,
-- which made the statement invalid.
select /*+ ALL_ROWS */  crm.SUBSCR_NO, crm.ACCOUNT_NO, ltrim(crm.AREA_CODE,'0'), crm.DEL_NO, crm.CIRCLE_ID
from MV_CRM_SUBS_DTLS crm,
        (select /*+ ALL_ROWS */  A.ALTERNATE_CONTACT, A.CREATED_DATE, A.EMAIL_ID, B.SUBSCR_NO
        from MCCIPRD.MCCI_PROFILE_DTLS a, MCCIPRD.MCCI_PROFILE_SUBSCR_DTLS b
        where A.PROFILE_ID = B.PROFILE_ID
        and   B.ACE_STATUS = 'N'
        ) aa
where crm.SUBSCR_NO    = aa.SUBSCR_NO
and   crm.SRVC_TYPE_ID = '125'
and   crm.END_DT IS NULL
| Id  | Operation              | Name                     | Rows  | Bytes |TempSpc| Cost (%CPU)| Time     |
|   0 | SELECT STATEMENT       |                          |  1481K|   100M|       |   245K  (5)| 00:49:09 |
|*  1 |  HASH JOIN             |                          |  1481K|   100M|    46M|   245K  (5)| 00:49:09 |
|*  2 |   HASH JOIN            |                          |  1480K|    29M|    38M| 13884   (9)| 00:02:47 |
|*  3 |    TABLE ACCESS FULL   | MCCI_PROFILE_SUBSCR_DTLS |  1480K|    21M|       |  3383  (13)| 00:00:41 |
|   4 |    INDEX FAST FULL SCAN| SYS_C002680              |  2513K|    14M|       |  6024   (5)| 00:01:13 |
|*  5 |   MAT_VIEW ACCESS FULL | MV_CRM_SUBS_DTLS_08AUG   |  1740K|    82M|       |   224K  (5)| 00:44:49 |
Predicate Information (identified by operation id):
   1 - access("CRM"."SUBSCR_NO"="B"."SUBSCR_NO")
   2 - access("A"."PROFILE_ID"="B"."PROFILE_ID")
   3 - filter("B"."ACE_STATUS"='N')
   5 - filter("CRM"."END_DT" IS NULL AND "CRM"."SRVC_TYPE_ID"='125')

Whereas for the modified MView query, the plan remains the same:
-- Modified query pushing the MView filters into an inline view
-- (explain plan follows below).
-- BUG FIX: removed the stray ">" quote-wrap artifact before crm.CIRCLE_ID,
-- which made the statement invalid.
select /*+ ALL_ROWS */ crm.SUBSCR_NO, crm.ACCOUNT_NO, ltrim(crm.AREA_CODE,'0'), crm.DEL_NO, crm.CIRCLE_ID
from    (select * from MV_CRM_SUBS_DTLS
         where SRVC_TYPE_ID = '125'
         and   END_DT IS NULL) crm,
        (select /*+ ALL_ROWS */  A.ALTERNATE_CONTACT, A.CREATED_DATE, A.EMAIL_ID, B.SUBSCR_NO
        from MCCIPRD.MCCI_PROFILE_DTLS a, MCCIPRD.MCCI_PROFILE_SUBSCR_DTLS b
        where A.PROFILE_ID = B.PROFILE_ID
        and   B.ACE_STATUS = 'N'
        ) aa
where crm.SUBSCR_NO  = aa.SUBSCR_NO
| Id  | Operation              | Name                     | Rows  | Bytes |TempSpc| Cost (%CPU)| Time     |
|   0 | SELECT STATEMENT       |                          |  1481K|   100M|       |   245K  (5)| 00:49:09 |
|*  1 |  HASH JOIN             |                          |  1481K|   100M|    46M|   245K  (5)| 00:49:09 |
|*  2 |   HASH JOIN            |                          |  1480K|    29M|    38M| 13884   (9)| 00:02:47 |
|*  3 |    TABLE ACCESS FULL   | MCCI_PROFILE_SUBSCR_DTLS |  1480K|    21M|       |  3383  (13)| 00:00:41 |
|   4 |    INDEX FAST FULL SCAN| SYS_C002680              |  2513K|    14M|       |  6024   (5)| 00:01:13 |
|*  5 |   MAT_VIEW ACCESS FULL | MV_CRM_SUBS_DTLS_08AUG   |  1740K|    82M|       |   224K  (5)| 00:44:49 |
Predicate Information (identified by operation id):
   1 - access("CRM"."SUBSCR_NO"="B"."SUBSCR_NO")
   2 - access("A"."PROFILE_ID"="B"."PROFILE_ID")
   3 - filter("B"."ACE_STATUS"='N')
   5 - filter("CRM"."END_DT" IS NULL AND "CRM"."SRVC_TYPE_ID"='125')

Also took your advice and tried to merge all the queries into a single INSERT SQL; will be posting the results shortly.
Edited by: BluShadow on 30-Aug-2011 10:21
added {noformat}{noformat} tags.  Please read {message:id=9360002} to learn to do this yourself                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                 
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                      

Similar Messages

  • Perl API: growing memory problem in loops over large sets of data

    Hi,
    When going through all XmlResults like this:
    while ($results->next($val)) {
    print $val->asString, "\n";
    The process size keeps growing. It does not when I comment $val->asString method out, but then I have no way of getting the results.
    This becomes a significant problem when the number of results is huge. I am doing this on a database of over a million short XML documents (400-800 bytes each).
    The more complete code is here:
    eval {
    $env = new DbEnv();
    $env->open($dbDir, Db::DB_JOINENV | Db::DB_INIT_LOCK
    | Db::DB_INIT_MPOOL | Db::DB_CREATE, 0);
    my $mgr = new XmlManager($env, DbXml::DBXML_ADOPT_DBENV);
    my $db = $mgr->openContainer(undef, $dbName, Db::DB_RDONLY);
    my $context = $mgr->createQueryContext(XmlQueryContext::LiveValues,
    XmlQueryContext::Lazy);
    my $lookup = $mgr->createIndexLookup($db, "", $nodeName,
    "node-$nodeType-equality-$syntax",
    new XmlValue($types{$nodeType}, $value), XmlIndexLookup::GTE);
    my $results = $lookup->execute(undef, $context);
    my $val = new XmlValue();
    while ($results->next($val)) {
    print $val->asString, "\n";
    if (my $e = catch std::exception) {
    die $e->what() . "\n";
    The process size just grows until the system limit is reached, then the process quits saying 'Out of memory'.
    I suspect the problem is with the std::string result returned by C++ XmlValue::asString() const.
    The (left-hand-side) result string is likely allocated by new std::string and receives the value by calling the string copy operator. Then the Perl scalar result is prepared, but when it gets returned to my code, the C++ string is not deleted.
    Moving the Sleepycat::XmlValue Perl object inside the loop does not help either:
    while ($results->hasNext()) {
    my $val = new XmlValue();
    $results->next($val);
    print $val->asString, "\n";
    In fact, the process seems to grow faster, possibly because the old $val instances do not get destroyed by Perl at the end of the loop. Where is Perl's garbage collection?
    I am using DB XML version: 2.2.13; BDB version: 4.4.20.2; OS: FreeBSD 6-STABLE. However the problem seems to be common for any OS or BDB XML version as it involves Perl-to-C++ interface.
    Has anyone experienced similar problems?
    Thanks,
    Konstantin.
    Konstantin @ Chuguev.com

    Good catch - you found a memory leak. Luckily the fix is very straightforward. Edit the file
    dbxml/src/perl/common.h
    and find this line
    #define newSVfromString(str) newSVpvn(str.c_str(), str.length())
    Change it to this
    #define newSVfromString(str) sv_2mortal(newSVpvn(str.c_str(), str.length()))
    and recompile the module.
    Paul

  • Best Practices for loading large sets of Data

    Just a general question regarding an initial load with a large set of data.
    Does it make any sense to use a materialized view to aid with load times for an initial load? Or do I simply let the query run for as long as it takes.
    Just looking for advice on what is the common approach here.
    Thanks!

    Hi GK,
    What I have normally seen is:
    1) Data would be extracted from APO Planning Area to APO Cube (FOR BACKUP purpose). Weekly or monthly, depending on how much data change you expect, or how critical it is for business. Backups are mostly monthly for DP.
    2) Data extracted from APO planning area directly to DSO of staging layer in BW, and then to BW cubes, for reporting.
    For DP monthly, SNP daily
    You can also use the option 1 that you mentioned below. In this case, the APO cube is the backup cube, while the BW cube is the one that you could use for reporting, and this BW cube gets data from APO cube.
    Benefit in this case is that we have to extract data from Planning Area only once. So, planning area is available for jobs/users for more time. However, backup and reporting extraction are getting mixed in this case, so issues in the flow could impact both the backup and the reporting. We have used this scenario recently, and yet to see the full impact.
    Thanks - Pawan

  • While erasing contact and setting my phone stop working showing round waiting circle and its not geting switched off also

    while erasing contact and setting my phone stop working showing round waiting circle and its not geting switched off also

    Try a Reset...
    Reset  ( No Data will be Lost )
    Press and hold the Sleep/Wake button and the Home button at the same time for at least ten seconds, until the Apple logo appears. Release the Buttons.
    http://support.apple.com/kb/ht1430
    If no joy...
    Connect to iTunes on the computer you usually Sync with and “ Restore “...
    http://support.apple.com/kb/HT1414
    If necessary Place the Device into Recovery mode...
    http://support.apple.com/kb/HT1808
    Note on Recovery Mode.
    You may need to try this More than Once...
    Be sure to Follow ALL the Steps...
    Once you have Recovered your Device...
    Re-Sync your Content or Restore from the most recent Backup...
    Restore from Backup
    http://support.apple.com/kb/ht1766

  • Problem to update very large volume of data for 2LIS_04* extr.

    Hi
    I have problem with jobs for 2LIS_04* extractors using Queued Delta.
    There are interface between R3 system and other production system and 3 or 4 times in the month very large volumen of data has been send to R3.
    Then job runs very long and not pull data to RSA7.
    How to resolve this problem.
    Our R3 system is PI_BASIS 2005_1_620.
    Thanks
    Adam

    U can check these SAP Notes..........it will help u........
    How can downtime be reduced for setup table update
    SAP Note Number: 753654
    Performance improvement for filling the setup tables
    SAP Note Number: 436393
    LBWE: Performance for setup of extract structures
    SAP Note Number: 437672

  • Problem while looping through record set and tem table for matching data

    hi I am using one record set and ane temp table and looping through both to find the match between dates.
    If date matches then it shud do some processing otherwise it will return default values(null values).
    FOR i IN student_rec .FIRST..student_rec .LAST          /*student_rec.school_date has 01-MAR-2012,02-MAR-2012,03-MAR-2012,04-MAR-2012,05-MAR-2012*/
    LOOP
    l_return_out.school_date := student_rec(i).school_date;
    l_return_out.marks_obtained := student_rec(i).marks_obtained;
    FOR i IN selected_dates .FIRST..selected_dates .LAST          /*selected_dates has 02-MAR-2012,03-MAR-2012,05-MAR-2012*/
    LOOP
    DBMS_OUTPUT.PUT_LINE(selected_dates(i));
    IF selected_dates(i)=student_rec(i).sett_date
    THEN
    EXIT;
    end if;
         ---------call procedure P1
    -----------get output as ret_val1               /*getting ret_val1 as 10 for 02-MAR-2012,03-MAR-2012,05-MAR-2012 */
         ----------call procedure P2
    ---------get ouput as ret_val2               /*getting ret_val1 as 20 for 02-MAR-2012,03-MAR-2012,05-MAR-2012 */
    if ret_val1>0 0r ret_val2>0
    then
    l_return_out.has_csts := yes;
    l_return_out.cst_present := 10;
    l_return_out.cst_absent := 20;
    else
    l_return_out.has_csts :=No;
    l_return_out.cst_present:= 0;
    l_return_out.cst_absent := 0;
    end if;
    end loop;
    l_return_out.has_cst := student_rec(i).has_csts;
    l_return_out.cst_missing := student_rec(i).cst_present;
    l_return_out.cst_existing := student_rec(i).cst_absent;
    PIPE ROW(l_return_out);
    END LOOP;
    RETURN ;
    I am expecting this as result
    school_date     marks_obtained     has_csts     cst_present cst_absent
    01-MAR-2012     20          
    02-MAR-2012     30          yes 10          20
    03-MAR-2012     40           yes 10          20
    04-MAR-2012     70          
    05-MAR-2012     60          yes 10          20
    but this as result
    school_date     marks_obtained     has_csts     cst_present cst_absent
    01-MAR-2012     20          
    02-MAR-2012     30          
    03-MAR-2012     40           
    04-MAR-2012     70          
    05-MAR-2012     60          
    Can anybody please highlight the mistake i am doing while processing the logic??
    Edited by: 942390 on Jul 13, 2012 8:44 PM
    Edited by: 942390 on Jul 13, 2012 8:45 PM

    I am getting a set values from a record set....student_rec
    and on pipelining this record set from 1st till last
    i am getting this
    school_date     marks_obtained     has_csts     cst_present cst_absent
    01-MAR-2012     20          
    02-MAR-2012     30          
    03-MAR-2012     40           
    04-MAR-2012     70          
    05-MAR-2012     60     
    so initially has_csts, cst_present and cst_absent is null for all dates.
    now have a temp table of selected_dates(which contains these dates 02-MAR-2012,03-MAR-2012,05-MAR-2012)
    now I am want to populate has_csts, cst_present and cst_absent with data only for those dates which is present in selected_dates temp table(02-MAR-2012,03-MAR-2012,05-MAR-2012) and that too has_csts, cst_present and cst_absent will be populated with some condition (based on the values from procedure got from P1 and P2).
    so want result set to look like
    school_date     marks_obtained     has_csts     cst_present cst_absent
    01-MAR-2012     20          
    02-MAR-2012     30          yes 10          20
    03-MAR-2012     40           yes 10          20
    04-MAR-2012     70          
    05-MAR-2012     60          yes 10          20
    so what could be the possible solution to obtained this....

  • Problem while using BCP utility for witing data in file

    hi all,
    I have a batch file in which I am using bcp command for reading data from MS SQL and writing it in delimiter file. Now there are some exceptions in MS SQL that while writing into file whenever it encounters new line character it switches to next line while writing and starts writing the rest of the data on next.
    Could you help me in getting rid of this problem. I wanted to replace the new line character with space.
    Thanks and regards
    Nitin

    Hi Dilip,
    Before going for any other table,
    As Kalnr is only one of the primary keys of table KEKO, You can try creating secondary index on KEKO, which might help in improving your report performance.
    Also, you can add more conditions in where clause if possible, which will also help in improving performance.
    Thansk,
    Archana

  • Problem while using KEKO(Product Costing - Header Data) table in the report

    hi,
    below is the slect query i have written.
    while accessing the table KEKO( Product Costing - Header Data ) .more time is taken.
    is there any alternative other than KEKO table when using in my report.
          SELECT FEH_STA VBELN POSNR FROM KEKO
               INTO TABLE IST_KEKO FOR ALL ENTRIES IN IST_VBAP_KEKO
               WHERE KALNR = IST_VBAP_KEKO-KALNR.
    regards,
    DILIP.

    Hi Dilip,
    Before going for any other table,
    As Kalnr is only one of the primary keys of table KEKO, You can try creating secondary index on KEKO, which might help in improving your report performance.
    Also, you can add more conditions in where clause if possible, which will also help in improving performance.
    Thansk,
    Archana

  • Problems of having a large table (columns and rows).

    hi people,
    can anyone give a list of problems that i will be facing when i have a large table(columns/rows). My table generate 5 lakhs record in a year and it keeps growing.
    if the answers is labourous, pls give the link of the web-site where i can download it.
    How to overcome it?
    Thanks in advance
    Ganapathy

    hi justin
    i understand u problem too.
    10 lakhs in Indian money = 1 million in the US.
    Iam trying to understand a system where there will be millions of record over a period of years. I felt that i need to address the problems that should be forseen before the system is developed(some thing like a priliminary investigation or feasibility study before taking up the project). So as of now i have no idea of the system, but do know that there will be millions of records. Iam trying to prepare a document that addresses these issues and how we are going to circumvent the issues and arrive at a solution.
    Thanks
    Ganapathy

  • Need Help - Problem while having a panelformLayout inside a Region.

    I have a jspx page which has some regions out of which one region has a panel form layout in it's corresponding jsff.
    Below is my jspx
    <?xml version='1.0' encoding='UTF-8'?>
    <jsp:root xmlns:jsp="http://java.sun.com/JSP/Page" version="2.1"
    xmlns:f="http://java.sun.com/jsf/core"
    xmlns:h="http://java.sun.com/jsf/html"
    xmlns:af="http://xmlns.oracle.com/adf/faces/rich"
    xmlns:pe="http://xmlns.oracle.com/adf/pageeditor"
    xmlns:cust="http://xmlns.oracle.com/adf/faces/customizable">
    <jsp:directive.page contentType="text/html;charset=UTF-8"/>
    <f:view>
    <af:document id="d1">
    <af:form id="f1">
    <af:pageTemplate viewId="/sdk/core/uifwk/template/templateDef.jspx"
    value="#{bindings.pageTemplateBinding}" id="emT">
    <f:facet name="emContent">
    <af:panelStretchLayout id="psl1" topHeight="auto">
    <!-- DO NOT REMOVE: This component is from Single Target Home Quick Start -->
    <f:facet name="center">
    <pe:pageCustomizable id="pageCustomizable1"
    toolbarLayout="message stretch addonpanels button">
    <af:panelStretchLayout startWidth="40%" endWidth="60%"
    id="ps12">
    <f:facet name="start">
    <af:panelGroupLayout layout="scroll" id="pgl1">
    <cust:panelCustomizable id="panelCustomizable1" showEditAction="false">
    <!--Add code here-->
    <cust:showDetailFrame id="sdf1" text="General">
    <af:region value="#{bindings.General1.regionModel}"
    id="r1"/>
    </cust:showDetailFrame>
    <cust:showDetailFrame id="sdf2" text="Features">
    <af:region value="#{bindings.Features1.regionModel}"
    id="r2"/>
    </cust:showDetailFrame>
    </cust:panelCustomizable>
    </af:panelGroupLayout>
    </f:facet>
    <f:facet name="end">
    <af:panelGroupLayout layout="scroll" id="pgl2">
    <cust:panelCustomizable id="panelCustomizable2" showEditAction="false">
    <!--Add code here-->
    <cust:showDetailFrame id="sdf4" text="Performance">
    <af:region value="#{bindings.Performance1.regionModel}"
    id="r4"/>
    </cust:showDetailFrame>
    </cust:panelCustomizable>
    </af:panelGroupLayout>
    </f:facet>
    </af:panelStretchLayout>
    </pe:pageCustomizable>
    </f:facet>
    </af:panelStretchLayout>
    </f:facet>
    </af:pageTemplate>
    </af:form>
    </af:document>
    </f:view>
    </jsp:root>
    Below is my jsff
    <?xml version='1.0' encoding='UTF-8'?>
    <jsp:root xmlns:jsp="http://java.sun.com/JSP/Page" version="2.1"
    xmlns:af="http://xmlns.oracle.com/adf/faces/rich"
    xmlns:f="http://java.sun.com/jsf/core">
    <!-- DO NOT REMOVE: EM templateDef Quick Start -->
    <af:panelFormLayout id="pfl1">
    <af:panelLabelAndMessage rendered="#{pageFlowScope.PsGeneral.isDisplayDomain}"
    id="plam2" label="Domain" for="aot2">
    <af:activeOutputText value="#{pageFlowScope.PsGeneral.DOMAIN}" id="aot2"/>
    </af:panelLabelAndMessage>
    <af:panelLabelAndMessage rendered="#{pageFlowScope.PsGeneral.isDisplayDbName}"
    id="plam3" label="Database Name" for="aot3">
    <af:activeOutputText value="#{pageFlowScope.PsGeneral.dbName}" id="aot3"/>
    </af:panelLabelAndMessage>
    <af:panelLabelAndMessage rendered="#{pageFlowScope.PsGeneral.isDisplayDbType}"
    id="plam4" label="Database Type" for="aot4">
    <af:activeOutputText value="#{pageFlowScope.PsGeneral.dbType}" id="aot4"/>
    </af:panelLabelAndMessage>
    <af:panelLabelAndMessage rendered="#{pageFlowScope.PsGeneral.isDisplayPsHome}"
    id="plam5" label="PS_HOME" for="aot5">
    <af:activeOutputText value="#{pageFlowScope.PsGeneral.PSHOME}" id="aot5"/>
    </af:panelLabelAndMessage>
    <af:panelLabelAndMessage rendered="#{pageFlowScope.PsGeneral.isDisplayPsCfgHome}"
    id="plam6" label="PS_CFG_HOME" for="aot6">
    <af:activeOutputText value="#{pageFlowScope.PsGeneral.PSCFGHOME}"
    id="aot6"/>
    </af:panelLabelAndMessage>
    <af:panelLabelAndMessage rendered="#{pageFlowScope.PsGeneral.isDisplayAppDb}"
    id="plam7" label="PS Application Database"
    for="gl1">
    <af:goLink rendered="#{pageFlowScope.PsGeneral.isDisplayAppDb}"
    destination="#{pageFlowScope.PsGeneral.appDbURL}"
    shortDesc="#{pageFlowScope.PsGeneral.appDbToolTip}"
    text="#{pageFlowScope.PsGeneral.appDbName}" id="gl1"/>
    </af:panelLabelAndMessage>
    Like this there are 23 Panel Label and Message components .
    <af:panelLabelAndMessage id="plam23" label="Host" for="gl3">
    <af:goLink destination="#{pageFlowScope.PsGeneral.hostURL}"
    text="#{pageFlowScope.PsGeneral.hostName}" id="gl3"/>
    </af:panelLabelAndMessage>
    </af:panelFormLayout>
    </jsp:root>
    When the page is rendered in the browser only the first 5 panelLabelAndMessage elements are rendered and the page is broken after that. There is no problem in the code behind, in whatever order you place the pLAM elements the first five rows are rendered and the page is broken with the following log
    Caused by: java.lang.IndexOutOfBoundsException: Index: 5, Size: 5
    at java.util.ArrayList.RangeCheck(ArrayList.java:547)
    at java.util.ArrayList.get(ArrayList.java:322)
    at oracle.adfinternal.view.faces.renderkit.rich.PanelFormLayoutRenderer$FormColumnEncoder.processComponent(PanelFormLayoutRenderer.java:1425)
    at oracle.adfinternal.view.faces.renderkit.rich.PanelFormLayoutRenderer$FormColumnEncoder.processComponent(PanelFormLayoutRenderer.java:1410)
    at org.apache.myfaces.trinidad.component.UIXComponent.processFlattenedChildren(UIXComponent.java:170)
    at org.apache.myfaces.trinidad.component.UIXComponent.processFlattenedChildren(UIXComponent.java:290)
    at org.apache.myfaces.trinidad.component.UIXComponent.encodeFlattenedChildren(UIXComponent.java:255)
    at oracle.adfinternal.view.faces.renderkit.rich.PanelFormLayoutRenderer._encodeChildren(PanelFormLayoutRenderer.java:352)
    at oracle.adfinternal.view.faces.renderkit.rich.PanelFormLayoutRenderer.encodeAll(PanelFormLayoutRenderer.java:187)
    at oracle.adf.view.rich.render.RichRenderer.encodeAll(RichRenderer.java:1431)
    at org.apache.myfaces.trinidad.render.CoreRenderer.encodeEnd(CoreRenderer.java:341)
    at org.apache.myfaces.trinidad.component.UIXComponentBase.encodeEnd(UIXComponentBase.java:767)
    at javax.faces.component.UIComponent.encodeAll(UIComponent.java:937)
    at org.apache.myfaces.trinidad.render.CoreRenderer.encodeChild(CoreRenderer.java:405)
    If I remove the "panelFormLayout" from the jsff and try, all the elements appear, but without the alignment. So clearly the problem is in 'panelFormLayout'.
    Is there anything wrong in the jspx that violates the layout rules? Can someone help me resolve this issue?
    Regards,
    Balakrishnan

    Thanks Timo for the reply.
    JDev Version : JDev 11.1.1.6
    Yes this is a Webcenter page.
    I don't understand what is that plain ADF. Because in all the pages we build, we use these kind of tags. I have used the same panelFormLayout in the jspx and it work's fine for any number of rows. Only when used inside the region the problem occured.
    Regards,
    Balakrishnan

  • Interfacing a large set of data mysql

    My Flex app is currently interfacing a mySQL table of about
    3Million rows. Flex is used just to view and slice the data;
    nothing is ever written back to the database.
    We just moved from e4x to using amfphp and experienced a good
    performance increase but still performance is quite slow. Paging or
    "splitting" is not a real option since the charts and tables on the
    Flex front are already aggregating.
    What do you think is the fastest way for Flex to read a MYsql
    database? Is amfphp the best method or am I missing a trick? I
    presume there is a much better way but just need a nudge in the
    right direction!

    My Flex app is currently interfacing a mySQL table of about
    3Million rows. Flex is used just to view and slice the data;
    nothing is ever written back to the database.
    We just moved from e4x to using amfphp and experienced a good
    performance increase but still performance is quite slow. Paging or
    "splitting" is not a real option since the charts and tables on the
    Flex front are already aggregating.
    What do you think is the fastest way for Flex to read a MYsql
    database? Is amfphp the best method or am I missing a trick? I
    presume there is a much better way but just need a nudge in the
    right direction!

  • Problem while creating with QUAN and CURR data type fields

    I am trying to create a Z table. A couple of fields are of data type QUAN and CURR, which need to have reference table and reference field.
    Field               DataType     Length     Decimal
    TOT_QTY       QUAN            15                 3
    PRICE              CURR             16                2
    For reference table and fields, I found a Table HRPAD23 which has a field called N_QUANTITY (15,3) that matches my TOT_QTY field's data type and it also has a field called UN_PRICE that matches my PRICE field's data type. But when I use them as reference, and try to activate the table, I get an error "combination reference table/field does not exist". How do I fix this? Can anybody help me? Thanks in advance.
    Chris
    Edited by: martin99 on Aug 18, 2011 9:16 PM

    Martin,
    I doubt that you need to add 2 extra fields in your table as suggested in the last post by John.
    Your error is very CLEAR: you seem to be using the wrong combination of reference table and field.
    For QTY field,
    use ref table  -  HRPAD23.              ref field- UNIT
    OR
    ref table - VBAP                       ref field - VRKME
    For PRICE field,
    use ref table - PAD25                  ref field - KWAER
    OR
    ref table - T77REFDOC                                     ref field - CURRENCY
    It should work.
    BR,
    Diwakar

  • Problem while launching Webdypro Application on click of a work item in UWL

    Hi All,
    I am trying to launch a webdynpro application by clicking one of the work items in UWL. But I am getting the error as "iview N/A".
    It is trying to use the UWL Launch Webdynpro iview(with the id com.sap.netweaver.bc.uwl.uwlLaunchWebdynpro)
    I searched for the above iview,I could see all the UWL related iviews at the following location(Portal content->Content Provided by SAP->End user content->Standard Portal users->iviews ->com.sap.netweaver.bc.uwl.uwliviews)
    I could find UWL Launch SAP BSP and UWL Launch SAP Transaction and few more,but could n't find UWL Launch Webdynpro iview.
    Can anyone let me know what I need to do to get that iview?
    I am working on EP6 & KMC SP19.
    Thanks for the help
    Regards,
    Santhosh

    Hi Santhosh,
    One thing is clear: you can see your task in the UWL. If your SWFVISU entry is correct (especially the System Alias), the only reason I can see is that the XML was not imported properly.
    Login as administrator and navigate to
    System Administration->System Configuration->Universal Worklist & Workflow->
    Universal Worklist Administration
    Re-Register your System again.
    and click this link on the same page "Click to Administrate Item Types and View Definitions"
    There will be an XML imported, just check your Task Number is present and check
    the application, if exists should not be a problem.
    If you look at the SAP standard ESS workflows, all Webdynpro applications are called directly by specifying the Webdynpro component name, not as an iView.
    I think you are using a Portal iView to display your Webdynpro; try to use the Webdynpro application as it is.
    Regards
    Abhimanyu L

  • I am having problem while using ms word with anjal( OS X Lion 10.7.5 (11G63))

    I am having a problem while using MS Word with Anjal: it is not working properly, although it works in Facebook and other areas. It is very important for me to be able to use Tamil fonts for my job. Can anyone help me out of this problem?
    thanking you
    by
    moses

    Those who are knowledgable in this area (myself not among them) say that Word's support for Asian languages is faulty. The best word processor for that use is "Mellel."

  • Problem while Binding multiple Parameters to View Object[Solved]

    Hello,
    I am facing a problem while binding multiple parameters with different data types in a View Object query. For example, suppose I have the following query in my view object.
    SELECT Header.ADDED_BY
    Header.BATCH_ID,
    FROM BATCH_HEADER Header
    WHERE :1='deptAdmin' and Header.BATCH_ID
    in
    select batch_id from batch_header_dept_mapping where dept_id in(SELECT * FROM TABLE(CAST(:0 AS TABLE_OF_VARCHAR)))
    I am able to pass bind variables of Array type for :0, using Steve's ArrayOfStringDomain example (ArrayOfStringDomain).
    But after passing a value to the second bind parameter, i.e. :1,
    I am getting the error as follows.
    ## Detail 0 ##
    java.sql.SQLSyntaxErrorException: ORA-00932: inconsistent datatypes: expected - got CHAR.
    I tried to set
    setWhereClauseParam(1,11); // 11 is Number
    setWhereClauseParam(0,arr); // arr is arr = new Array(descriptor,conn,deptid); for in parameter.
    But of no use , Please let me know if any thing missing form me or have any another solutions. Also please provide me any example if have.
    Thank you,
    Sandeep
    Edited by: user11187811 on Oct 23, 2009 7:27 AM
    Edited by: user11187811 on Oct 26, 2009 12:52 AM
    Edited by: user11187811 on Oct 26, 2009 6:51 AM

    Hi.
    When using non-Oracle named parameter binding styles as you've done (i.e. :0, :1), regardless of what number you give each bind variable, they are sequenced 0, 1, 2, etc. As such, your bind variable :1 is the 0th parameter, and your bind variable :0 is the 1st parameter. Your statement is correct.
    The :1 I used was actually in the 0th position, and :0 was in the 1st position — like you said, sequenced 0, 1, 2, etc. Now I understand the answer; I corrected my mistake by assigning the right values to the right bind variables, and the problem is solved.
    Thanks Chris.

Maybe you are looking for