Exception while updating a clob value using dbms_lob.fragment_insert

Hi,
Here is the procedure:
procedure tempProc(str3 in varchar2) is
  QI       clob;
  -- LOB(QI) STORE AS securefile ;
  v_cursor sys_refcursor;
  str      varchar2(50);
  i        number default 30;
begin
  open v_cursor for
    select b.OCEAN_RATE_XML.getClobVal()
    from   TNMAB_OCEAN_RATE_XML b
    where  XMLCast(
             XMLQuery('declare default element namespace "http://com.oocl.schema.tnm.agreementbuilder"; (: :) /OceanOfferRate/ObjectID'
                      PASSING b.OCEAN_RATE_XML RETURNING CONTENT)
             AS NUMBER(20)) = 200000000000050;
  fetch v_cursor into QI;
  close v_cursor;
  dbms_output.put_line('abcds'||dbms_lob.getlength(QI));
  -- insert the new fragment just before the closing </OceanOfferRate> tag
  dbms_lob.fragment_insert(QI, 3, dbms_lob.getlength(QI)-17, '<abc/>');
  dbms_output.put_line('Done insert');
  -- read the tail of the CLOB back for a quick check
  dbms_lob.read(QI, i, dbms_lob.getlength(QI)-i, str);
  -- dbms_lob.read(QI, 20, dbms_lob.getlength(QI)-20, str);
  dbms_output.put_line('Done read');
  dbms_output.put_line(str);
end tempProc;
I am getting the exception below. Error report:
ORA-43856: Unsupported LOB type for SECUREFILE LOB operation
ORA-06512: at "SYS.DBMS_LOB", line 1076
ORA-06512: at "TNM_PLSQL.TNM_AB_QI_UPDT_PKG", line 377
ORA-06512: at line 5
ORA-06512: at line 9
<OceanOfferRate xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns="http://com.oocl.schema.tnm.agreementbuilder">
<ObjectID>200000000000050</ObjectID>
<RateID>2</RateID>
<AgreementID>00000002</AgreementID>
<StartingVersion>0</StartingVersion>
<EndingVersion>0</EndingVersion>
<EffectiveFrom>2010-05-16T00:00:00.000000</EffectiveFrom>
<EffectiveTo>2010-06-30T00:00:00.000000</EffectiveTo>
<RateStatus>QuoteExpired</RateStatus>
<ApprovalStatus>Approved</ApprovalStatus>
<Flags>3</Flags>
<Tariffs>015</Tariffs>
<BaseRates>
<SizeType>20GP</SizeType>
<Amount>65</Amount>
<Currency>USD</Currency>
<EffectiveFrom>2010-05-16T00:00:00.000000</EffectiveFrom>
<EffectiveTo>2010-06-30T00:00:00.000000</EffectiveTo>
<Flags>0</Flags>
</BaseRates>
<BaseRates>
<SizeType>40GP</SizeType>
<Amount>100</Amount>
<Currency>USD</Currency>
<EffectiveFrom>2010-05-16T00:00:00.000000</EffectiveFrom>
<EffectiveTo>2010-06-30T00:00:00.000000</EffectiveTo>
<Flags>0</Flags>
</BaseRates>
<BaseRates>
<SizeType>40HQ</SizeType>
<Amount>100</Amount>
<Currency>USD</Currency>
<EffectiveFrom>2010-05-16T00:00:00.000000</EffectiveFrom>
<EffectiveTo>2010-06-30T00:00:00.000000</EffectiveTo>
<Flags>0</Flags>
</BaseRates>
<ShippingPartyGroupName>Default</ShippingPartyGroupName>
<CustomerContactGroupName>Default</CustomerContactGroupName>
<NamedCustomerGroupName>Default</NamedCustomerGroupName>
<LastSentDate>2010-06-14T22:42:48.536000</LastSentDate>
<ValidityDays>30</ValidityDays>
<ValidityExpirationDate>2010-06-30T00:00:00.000000</ValidityExpirationDate>
<GuidelineRateReference>
<Trunk>
<RateID>100001510668470</RateID>
<SurchargeIDs>684172719752758</SurchargeIDs>
<SurchargeIDs>626856918161564</SurchargeIDs>
<SurchargeIDs>680028613161439</SurchargeIDs>
<SurchargeIDs>679555629913553</SurchargeIDs>
<SurchargeIDs>673382151283681</SurchargeIDs>
<SurchargeIDs>679789705628181</SurchargeIDs>
<SurchargeIDs>653835218283772</SurchargeIDs>
<SurchargeIDs>653955477367768</SurchargeIDs>
<SurchargeIDs>653955477367759</SurchargeIDs>
</Trunk>
</GuidelineRateReference>
<CreatedOn>2010-05-16T13:40:29.201344</CreatedOn>
<CreatedBy>KRISHSA</CreatedBy>
<SalesPerson>COOKBR</SalesPerson>
<SalesOffice>PHE</SalesOffice>
<LastUpdated>2010-06-30T00:13:06.000000</LastUpdated>
<LastUpdatedBy>TNM_APPLN</LastUpdatedBy>
<Origins>
<ID_Wrappers>
<Value>100000000026067</Value>
</ID_Wrappers>
<ID_Wrappers>
<Value>100000000008923</Value>
</ID_Wrappers>
<ID_Wrappers>
<Value>100000000024173</Value>
</ID_Wrappers>
</Origins>
<Destinations>
<ID_Wrappers>
<Value>100000000008923</Value>
</ID_Wrappers>
<ID_Wrappers>
<Value>100000000024173</Value>
</ID_Wrappers>
<ID_Wrappers>
<Value>100000000013006</Value>
</ID_Wrappers>
<ID_Wrappers>
<Value>100000000046704</Value>
</ID_Wrappers>
</Destinations>
<DeliveryMode>YY</DeliveryMode>
<TradeLane>IAT</TradeLane>
<Commodity>
<Description>Cotton for test</Description>
<CargoNatureGroup>32</CargoNatureGroup>
</Commodity>
<RateLevel>1</RateLevel>
</OceanOfferRate>
Any advice on whether there is another way to update the CLOB with additional information at the end of the XML, just before </OceanOfferRate>?

A quick example illustrating what I mean:
Settings
SQL> create table department_xml (
  2    xmldoc xmltype
  3  , depid  number(4) as
  4    (
  5      xmlcast(
  6        xmlquery('declare default element namespace "http://some.namespace.org"; (: :)
  7                  /Department/@ID' passing xmldoc returning content)
  8        as number(4)
  9      )
10    ) virtual
11  )
12  xmltype xmldoc store as binary xml
13  ;
Table created.
SQL> create unique index department_xml_depid_uix on department_xml (depid);
Index created.
SQL> insert into department_xml (xmldoc)
  2  select xmlelement("Department",
  3           xmlattributes('http://some.namespace.org' as "xmlns"
  4                        , department_id as "ID")
  5         , xmlforest(
  6             d.department_name as "departmentName"
  7           , d.manager_id as "managerID"
  8           , xmlforest(
  9               l.street_address as "street"
10             , l.postal_code as "zipcode"
11             , l.city as "city"
12             , l.state_province as "state"
13             , c.country_name as "country"
14             ) as "address"
15           )
16         )
17  from hr.departments d
18       join hr.locations l on l.location_id = d.location_id
19       join hr.countries c on c.country_id = l.country_id
20  ;
27 rows created.
SQL> commit;
Commit complete.
Sample document
SQL> select xmlserialize(document xmldoc as clob indent) from department_xml where depid = 50;
XMLSERIALIZE(DOCUMENTXMLDOCASCLOBINDENT)
<Department xmlns="http://some.namespace.org" ID="50">
  <departmentName>Shipping</departmentName>
  <managerID>121</managerID>
  <address>
    <street>2011 Interiors Blvd</street>
    <zipcode>99236</zipcode>
    <city>South San Francisco</city>
    <state>California</state>
    <country>United States of America</country>
  </address>
</Department>
Updating each doc with the list of employees from the corresponding department
SQL> UPDATE department_xml d
  2  SET d.xmldoc =
  3      insertChildXMLBefore(
  4        d.xmldoc
  5      , '/Department'
  6      , 'address'
  7      , (
  8          select xmlelement("employees",
  9                   xmlagg(
10                     xmlelement("employee",
11                       xmlattributes(e.employee_id as "ID")
12                     , xmlforest( e.first_name || ' ' || e.last_name as "employeeName"
13                                , e.hire_date as "hireDate"
14                                , e.salary as "salary" )
15                     )
16                     order by e.employee_id
17                   )
18                 )
19          from hr.employees e
20          where e.department_id = d.depid
21        )
22      , 'xmlns="http://some.namespace.org"'
23      )
24  ;
27 rows updated.
Checking...
SQL> select xmlserialize(document xmldoc as clob indent) from department_xml where depid = 50;
XMLSERIALIZE(DOCUMENTXMLDOCASCLOBINDENT)
<Department xmlns="http://some.namespace.org" ID="50">
  <departmentName>Shipping</departmentName>
  <managerID>121</managerID>
  <employees>
    <employee ID="120">
      <employeeName>Matthew Weiss</employeeName>
      <hireDate>2004-07-18</hireDate>
      <salary>8000</salary>
    </employee>
    <employee ID="121">
      <employeeName>Adam Fripp</employeeName>
      <hireDate>2005-04-10</hireDate>
      <salary>8200</salary>
    </employee>
    <employee ID="122">
      <employeeName>Payam Kaufling</employeeName>
      <hireDate>2003-05-01</hireDate>
      <salary>7900</salary>
    </employee>
    <employee ID="123">
      <employeeName>Shanta Vollman</employeeName>
      <hireDate>2005-10-10</hireDate>
      <salary>6500</salary>
    </employee>
    <employee ID="124">
      <employeeName>Kevin Mourgos</employeeName>
      <hireDate>2007-11-16</hireDate>
      <salary>5800</salary>
    </employee>
    <employee ID="125">
      <employeeName>Julia Nayer</employeeName>
      <hireDate>2005-07-16</hireDate>
      <salary>3200</salary>
    </employee>
    <employee ID="126">
      <employeeName>Irene Mikkilineni</employeeName>
      <hireDate>2006-09-28</hireDate>
      <salary>2700</salary>
    </employee>
    <employee ID="127">
      <employeeName>James Landry</employeeName>
      <hireDate>2007-01-14</hireDate>
      <salary>2400</salary>
    </employee>
    <employee ID="128">
      <employeeName>Steven Markle</employeeName>
      <hireDate>2008-03-08</hireDate>
      <salary>2200</salary>
    </employee>
    <employee ID="129">
      <employeeName>Laura Bissot</employeeName>
      <hireDate>2005-08-20</hireDate>
      <salary>3300</salary>
    </employee>
    <employee ID="130">
      <employeeName>Mozhe Atkinson</employeeName>
      <hireDate>2005-10-30</hireDate>
      <salary>2800</salary>
    </employee>
    <employee ID="131">
      <employeeName>James Marlow</employeeName>
      <hireDate>2005-02-16</hireDate>
      <salary>2500</salary>
    </employee>
    <employee ID="132">
      <employeeName>TJ Olson</employeeName>
      <hireDate>2007-04-10</hireDate>
      <salary>2100</salary>
    </employee>
    <employee ID="133">
      <employeeName>Jason Mallin</employeeName>
      <hireDate>2004-06-14</hireDate>
      <salary>3300</salary>
    </employee>
    <employee ID="134">
      <employeeName>Michael Rogers</employeeName>
      <hireDate>2006-08-26</hireDate>
      <salary>2900</salary>
    </employee>
    <employee ID="135">
      <employeeName>Ki Gee</employeeName>
      <hireDate>2007-12-12</hireDate>
      <salary>2400</salary>
    </employee>
    <employee ID="136">
      <employeeName>Hazel Philtanker</employeeName>
      <hireDate>2008-02-06</hireDate>
      <salary>2200</salary>
    </employee>
    <employee ID="137">
      <employeeName>Renske Ladwig</employeeName>
      <hireDate>2003-07-14</hireDate>
      <salary>3600</salary>
    </employee>
    <employee ID="138">
      <employeeName>Stephen Stiles</employeeName>
      <hireDate>2005-10-26</hireDate>
      <salary>3200</salary>
    </employee>
    <employee ID="139">
      <employeeName>John Seo</employeeName>
      <hireDate>2006-02-12</hireDate>
      <salary>2700</salary>
    </employee>
    <employee ID="140">
      <employeeName>Joshua Patel</employeeName>
      <hireDate>2006-04-06</hireDate>
      <salary>2500</salary>
    </employee>
    <employee ID="141">
      <employeeName>Trenna Rajs</employeeName>
      <hireDate>2003-10-17</hireDate>
      <salary>3500</salary>
    </employee>
    <employee ID="142">
      <employeeName>Curtis Davies</employeeName>
      <hireDate>2005-01-29</hireDate>
      <salary>3100</salary>
    </employee>
    <employee ID="143">
      <employeeName>Randall Matos</employeeName>
      <hireDate>2006-03-15</hireDate>
      <salary>2600</salary>
    </employee>
    <employee ID="144">
      <employeeName>Peter Vargas</employeeName>
      <hireDate>2006-07-09</hireDate>
      <salary>2500</salary>
    </employee>
    <employee ID="180">
      <employeeName>Winston Taylor</employeeName>
      <hireDate>2006-01-24</hireDate>
      <salary>3200</salary>
    </employee>
    <employee ID="181">
      <employeeName>Jean Fleaur</employeeName>
      <hireDate>2006-02-23</hireDate>
      <salary>3100</salary>
    </employee>
    <employee ID="182">
      <employeeName>Martha Sullivan</employeeName>
      <hireDate>2007-06-21</hireDate>
      <salary>2500</salary>
    </employee>
    <employee ID="183">
      <employeeName>Girard Geoni</employeeName>
      <hireDate>2008-02-03</hireDate>
      <salary>2800</salary>
    </employee>
    <employee ID="184">
      <employeeName>Nandita Sarchand</employeeName>
      <hireDate>2004-01-27</hireDate>
      <salary>4200</salary>
    </employee>
    <employee ID="185">
      <employeeName>Alexis Bull</employeeName>
      <hireDate>2005-02-20</hireDate>
      <salary>4100</salary>
    </employee>
    <employee ID="186">
      <employeeName>Julia Dellinger</employeeName>
      <hireDate>2006-06-24</hireDate>
      <salary>3400</salary>
    </employee>
    <employee ID="187">
      <employeeName>Anthony Cabrio</employeeName>
      <hireDate>2007-02-07</hireDate>
      <salary>3000</salary>
    </employee>
    <employee ID="188">
      <employeeName>Kelly Chung</employeeName>
      <hireDate>2005-06-14</hireDate>
      <salary>3800</salary>
    </employee>
    <employee ID="189">
      <employeeName>Jennifer Dilly</employeeName>
      <hireDate>2005-08-13</hireDate>
      <salary>3600</salary>
    </employee>
    <employee ID="190">
      <employeeName>Timothy Gates</employeeName>
      <hireDate>2006-07-11</hireDate>
      <salary>2900</salary>
    </employee>
    <employee ID="191">
      <employeeName>Randall Perkins</employeeName>
      <hireDate>2007-12-19</hireDate>
      <salary>2500</salary>
    </employee>
    <employee ID="192">
      <employeeName>Sarah Bell</employeeName>
      <hireDate>2004-02-04</hireDate>
      <salary>4000</salary>
    </employee>
    <employee ID="193">
      <employeeName>Britney Everett</employeeName>
      <hireDate>2005-03-03</hireDate>
      <salary>3900</salary>
    </employee>
    <employee ID="194">
      <employeeName>Samuel McCain</employeeName>
      <hireDate>2006-07-01</hireDate>
      <salary>3200</salary>
    </employee>
    <employee ID="195">
      <employeeName>Vance Jones</employeeName>
      <hireDate>2007-03-17</hireDate>
      <salary>2800</salary>
    </employee>
    <employee ID="196">
      <employeeName>Alana Walsh</employeeName>
      <hireDate>2006-04-24</hireDate>
      <salary>3100</salary>
    </employee>
    <employee ID="197">
      <employeeName>Kevin Feeney</employeeName>
      <hireDate>2006-05-23</hireDate>
      <salary>3000</salary>
    </employee>
    <employee ID="198">
      <employeeName>Donald OConnell</employeeName>
      <hireDate>2007-06-21</hireDate>
      <salary>2600</salary>
    </employee>
    <employee ID="199">
      <employeeName>Douglas Grant</employeeName>
      <hireDate>2008-01-13</hireDate>
      <salary>2600</salary>
    </employee>
  </employees>
  <address>
    <street>2011 Interiors Blvd</street>
    <zipcode>99236</zipcode>
    <city>South San Francisco</city>
    <state>California</state>
    <country>United States of America</country>
  </address>
</Department>
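For reference, the same approach should work directly against the document from the original post: since the new element is meant to become the last child of OceanOfferRate, appendChildXML (rather than insertChildXMLbefore) is the natural fit. A rough, untested sketch, reusing the table, column and namespace from the question and a placeholder <abc/> element:
UPDATE tnmab_ocean_rate_xml b
SET b.ocean_rate_xml =
    appendChildXML(
      b.ocean_rate_xml
    , '/OceanOfferRate'
    , XMLType('<abc xmlns="http://com.oocl.schema.tnm.agreementbuilder"/>')
    , 'xmlns="http://com.oocl.schema.tnm.agreementbuilder"'
    )
WHERE XMLCast(
        XMLQuery('declare default element namespace "http://com.oocl.schema.tnm.agreementbuilder"; (: :) /OceanOfferRate/ObjectID'
                 PASSING b.ocean_rate_xml RETURNING CONTENT)
        AS NUMBER(20)) = 200000000000050;
This keeps the whole operation at the XMLType level, so there is no need to fetch the CLOB and patch it with DBMS_LOB at all.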

Similar Messages

  • CIN: Items are not displayed while updating the RG1 Register using T Code:

    Hi Experts,
    Items are not displayed while updating the RG1 Register using T Code: J1I5.
    System is not showing any error message.
Can anyone explain how to update the RG1 Register?
    Thanks
    Chandra

    Hi,
Don't go to the table again and again. The table is updated only at the initial stage, for the stock upload, not while SAP is running.
In day-to-day business the flow is as follows:
Production - MB31 - to unrestricted stock
Sales - VL02N - PGI - to customer, i.e. out of unrestricted stock
Every day you can run transaction J1I5 once (as decided) and update the register, so all transactions that happened that day get updated, i.e. both input and output.
And this will happen every day.
Hence don't use the table, as it is only for the initial data upload, i.e. just before go-live.
Hope I am clear.
    krishna

  • JDBC SQL Exception while updating an SQL database

    Hi Experts,
Recently I encountered a JDBC SQL exception while updating a SQL database, as below:
com.sap.aii.af.ra.ms.api.DeliveryException: Error processing request in sax parser: Error when executing statement for table/stored proc. 'SAP_UPLOAD' (structure 'Statement'): java.sql.SQLException: No more data to read from socket
The above system error doesn't occur for all messages (so far it has happened twice), but I need to find a fix to overcome it.
Has anybody come across the above system error? Please help me resolve it.
    Thanks

    Hi bandana,
    From SAP Note --> 831162
    17. Receiver: java.sql.SQLException During First Message Processing
    Q: When sending a message to a JDBC receiver channel for the first time after an extended inactivity period, I am observing a java.sql.SQLException in the adapter's processing, which refers to a closed connection or a timeout? What is causing this and how do I work around this problem?
    A: The database server has apparently closed the adapter's JDBC connection from the server side. Nevertheless, the message should be processed successfully during the next retry. If you want to completely eliminate the symptoms, enable the setting "Advanced Mode" -> "Disconnect from database after each message processing". Note that this might have a negative performance impact for high-volume processing.
I would recommend you go through the complete note. Very informative.
    Regards,

  • Error While updating Process form data Using Scheduler

    Hi All,
I am trying to update process form data (e.g. last name) using scheduled task code. I am getting an error while updating the field.
Code:
HashMap<String, String> map = new HashMap<String, String>();
map.put("UD_EBS_PF_LASTNAME", "lastname");
formintf.setProcessFormData(instancekey, map);  // I AM GETTING THE ERROR AT THIS LINE
saying:
    Thor.API.Exceptions.tcAPIException: The following required fields have not been given values:EBS IT Resource : The following required fields have not been given values:EBS IT Resource
        at weblogic.rjvm.ResponseImpl.unmarshalReturn(ResponseImpl.java:234)
        at weblogic.rmi.cluster.ClusterableRemoteRef.invoke(ClusterableRemoteRef.java:348)
        at weblogic.rmi.cluster.ClusterableRemoteRef.invoke(ClusterableRemoteRef.java:259)
        at Thor.API.Operations.tcFormInstanceOperationsIntfEJB_h6wb8n_tcFormInstanceOperationsIntfRemoteImpl_1036_WLStub.setProcessFormDatax(Unknown Source)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at weblogic.ejb.container.internal.RemoteBusinessIntfProxy.invoke(RemoteBusinessIntfProxy.java:85)
        at $Proxy2.setProcessFormDatax(Unknown Source)
        at Thor.API.Operations.tcFormInstanceOperationsIntfDelegate.setProcessFormData(Unknown Source)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at Thor.API.Base.SecurityInvocationHandler$1.run(SecurityInvocationHandler.java:68)
        at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:321)
        at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:120)
        at weblogic.security.Security.runAs(Security.java:41)
        at Thor.API.Security.LoginHandler.weblogicLoginSession.runAs(weblogicLoginSession.java:52)
        at Thor.API.Base.SecurityInvocationHandler.invoke(SecurityInvocationHandler.java:79)
        at $Proxy3.setProcessFormData(Unknown Source)
        at com.wyndham.tasks.AssignRandomPasswordToAllUsersSchedulerTest.execute(AssignRandomPasswordToAllUsersSchedulerTest.java:182)
        at com.wyndham.tasks.AssignRandomPasswordToAllUsersSchedulerTest.main(AssignRandomPasswordToAllUsersSchedulerTest.java:63)
    Caused by: Thor.API.Exceptions.tcAPIException: The following required fields have not been given values:EBS IT Resource : The following required fields have not been given values:EBS IT Resource
        at com.thortech.xl.ejb.beansimpl.tcFormInstanceOperationsBean.setProcessFormData(tcFormInstanceOperationsBean.java:761)
        at com.thortech.xl.ejb.beansimpl.tcFormInstanceOperationsBean.setProcessFormData(tcFormInstanceOperationsBean.java:426)
        at Thor.API.Operations.tcFormInstanceOperationsIntfEJB.setProcessFormDatax(Unknown Source)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)

Is it possible that the field ZDATE was in your form interface/context and now it is not? I guess some source has changed, so the field in the form (bound to the no longer existing field) cannot be processed. Otto

  • Error while updating the Assignment details using 'hr_assignment_api'

    An exception message is thrown while updating the assignment details through 'hr_assignment_api.update_emp_asg'.
    The message is 'ORA-20001: HR_FLEX_VALUE_MISSING: N, COLUMN, ASS_ATTRIBUTE6, N, PROMPT'.
Even though a valid value is being passed to the segment, the error is thrown.
I have tested the API in one instance and it works fine. Recently I migrated the code to another instance and encountered this error. The only difference is that in the newer instance, the segment is mandatory.
One interesting thing is that when I update the assignment through the application, it successfully saves the record. Then if I use the API to correct the assignment, it works fine. The API fails to update only when the mandatory segment is null.
    Please help
    Thanks in advance
    Kumar

Please post details of the OS, database and EBS versions, along with the HR RUP level. It may be a bug, as outlined in MOS Doc 429169.1 (When Calling the Assignment API To Update Position, It Errors in Required Segment).
    HTH
    Srini

  • Exception while updating a table

    Hi
While updating a table through an entity bean I am getting
    Base EJBException
    java.sql.SQLException: ORA-01401: inserted value too large for column
How do I resolve this problem?
    com.sap.engine.services.ejb.exceptions.BaseEJBException: SQLException while the data is being flushed. The persistent object is com.chep.portfolio.da.ejb.admin.UserPreferencesEJBBean4_0Persistent.
         at com.sap.engine.services.ejb.entity.pm.UpdatablePersistent.ejbFlush(UpdatablePersistent.java:101)
         at com.sap.engine.services.ejb.entity.pm.TransactionContext.flushAll(TransactionContext.java:429)
         at com.sap.engine.services.ejb.entity.pm.TransactionContext.flush(TransactionContext.java:378)
         at com.sap.engine.services.ejb.entity.pm.TransactionContext.beforeCompletion(TransactionContext.java:506)
         at com.sap.engine.services.ejb.entity.SynchronizationList.beforeCompletion(SynchronizationList.java:136)
         at com.sap.engine.services.ts.jta.impl.TransactionImpl.commit(TransactionImpl.java:226)
         at com.chep.portfolio.da.ejb.admin.UserPreferencesEJBLocalLocalObjectImpl4_0.setUserProfileId(UserPreferencesEJBLocalLocalObjectImpl4_0.java:614)
         at com.chep.portfolio.admin.business.facade.user.UserFacadeBean.updateUserPreferences(UserFacadeBean.java:774)
         at com.chep.portfolio.admin.business.facade.user.UserFacadeLocalLocalObjectImpl0_0.updateUserPreferences(UserFacadeLocalLocalObjectImpl0_0.java:247)
         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
         at java.lang.reflect.Method.invoke(Method.java:324)
         at com.sap.engine.services.webservices.runtime.EJBImplementationContainer.invokeMethod(EJBImplementationContainer.java:126)
         at com.sap.engine.services.webservices.runtime.RuntimeProcessor.process(RuntimeProcessor.java:157)
         at com.sap.engine.services.webservices.runtime.RuntimeProcessor.process(RuntimeProcessor.java:79)
         at com.sap.engine.services.webservices.runtime.servlet.ServletDispatcherImpl.doPost(ServletDispatcherImpl.java:92)
         at SoapServlet.doPost(SoapServlet.java:51)
         at javax.servlet.http.HttpServlet.service(HttpServlet.java:760)
         at javax.servlet.http.HttpServlet.service(HttpServlet.java:853)
         at com.sap.engine.services.servlets_jsp.server.HttpHandlerImpl.runServlet(HttpHandlerImpl.java:401)
         at com.sap.engine.services.servlets_jsp.server.HttpHandlerImpl.handleRequest(HttpHandlerImpl.java:266)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.startServlet(RequestAnalizer.java:387)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.startServlet(RequestAnalizer.java:365)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.invokeWebContainer(RequestAnalizer.java:944)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.handle(RequestAnalizer.java:266)
         at com.sap.engine.services.httpserver.server.Client.handle(Client.java:95)
         at com.sap.engine.services.httpserver.server.Processor.request(Processor.java:175)
         at com.sap.engine.core.service630.context.cluster.session.ApplicationSessionMessageListener.process(ApplicationSessionMessageListener.java:33)
         at com.sap.engine.core.cluster.impl6.session.MessageRunner.run(MessageRunner.java:41)
         at com.sap.engine.core.thread.impl3.ActionObject.run(ActionObject.java:37)
         at java.security.AccessController.doPrivileged(Native Method)
         at com.sap.engine.core.thread.impl3.SingleThread.execute(SingleThread.java:100)
         at com.sap.engine.core.thread.impl3.SingleThread.run(SingleThread.java:170)
    Caused by: java.sql.SQLException: ORA-01401: inserted value too large for column
         at oracle.jdbc.dbaccess.DBError.throwSqlException(DBError.java:134)
         at oracle.jdbc.ttc7.TTIoer.processError(TTIoer.java:289)
         at oracle.jdbc.ttc7.Oall7.receive(Oall7.java:582)
         at oracle.jdbc.ttc7.TTC7Protocol.doOall7(TTC7Protocol.java:1986)
         at oracle.jdbc.ttc7.TTC7Protocol.parseExecuteFetch(TTC7Protocol.java:1144)
         at oracle.jdbc.driver.OracleStatement.executeNonQuery(OracleStatement.java:2152)
         at oracle.jdbc.driver.OracleStatement.doExecuteOther(OracleStatement.java:2035)
         at oracle.jdbc.driver.OracleStatement.doExecuteWithTimeout(OracleStatement.java:2876)
         at oracle.jdbc.driver.OraclePreparedStatement.executeUpdate(OraclePreparedStatement.java:609)
         at com.sap.engine.services.dbpool.wrappers.PreparedStatementWrapper.executeUpdate(PreparedStatementWrapper.java:240)
         at com.chep.portfolio.da.ejb.admin.UserPreferencesEJBBean4_0Persistent.ejb_iUpdate(UserPreferencesEJBBean4_0Persistent.java:552)
         at com.sap.engine.services.ejb.entity.pm.UpdatablePersistent.ejbFlush(UpdatablePersistent.java:80)
         ... 33 more
    java.sql.SQLException: ORA-01401: inserted value too large for column
         at oracle.jdbc.dbaccess.DBError.throwSqlException(DBError.java:134)
         at oracle.jdbc.ttc7.TTIoer.processError(TTIoer.java:289)
         at oracle.jdbc.ttc7.Oall7.receive(Oall7.java:582)
         at oracle.jdbc.ttc7.TTC7Protocol.doOall7(TTC7Protocol.java:1986)
         at oracle.jdbc.ttc7.TTC7Protocol.parseExecuteFetch(TTC7Protocol.java:1144)
         at oracle.jdbc.driver.OracleStatement.executeNonQuery(OracleStatement.java:2152)
         at oracle.jdbc.driver.OracleStatement.doExecuteOther(OracleStatement.java:2035)
         at oracle.jdbc.driver.OracleStatement.doExecuteWithTimeout(OracleStatement.java:2876)
         at oracle.jdbc.driver.OraclePreparedStatement.executeUpdate(OraclePreparedStatement.java:609)
         at com.sap.engine.services.dbpool.wrappers.PreparedStatementWrapper.executeUpdate(PreparedStatementWrapper.java:240)
         at com.chep.portfolio.da.ejb.admin.UserPreferencesEJBBean4_0Persistent.ejb_iUpdate(UserPreferencesEJBBean4_0Persistent.java:552)
         at com.sap.engine.services.ejb.entity.pm.UpdatablePersistent.ejbFlush(UpdatablePersistent.java:80)
         at com.sap.engine.services.ejb.entity.pm.TransactionContext.flushAll(TransactionContext.java:429)
         at com.sap.engine.services.ejb.entity.pm.TransactionContext.flush(TransactionContext.java:378)
         at com.sap.engine.services.ejb.entity.pm.TransactionContext.beforeCompletion(TransactionContext.java:506)
         at com.sap.engine.services.ejb.entity.SynchronizationList.beforeCompletion(SynchronizationList.java:136)
         at com.sap.engine.services.ts.jta.impl.TransactionImpl.commit(TransactionImpl.java:226)
         at com.chep.portfolio.da.ejb.admin.UserPreferencesEJBLocalLocalObjectImpl4_0.setUserProfileId(UserPreferencesEJBLocalLocalObjectImpl4_0.java:614)
         at com.chep.portfolio.admin.business.facade.user.UserFacadeBean.updateUserPreferences(UserFacadeBean.java:774)
         at com.chep.portfolio.admin.business.facade.user.UserFacadeLocalLocalObjectImpl0_0.updateUserPreferences(UserFacadeLocalLocalObjectImpl0_0.java:247)
         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
         at java.lang.reflect.Method.invoke(Method.java:324)
         at com.sap.engine.services.webservices.runtime.EJBImplementationContainer.invokeMethod(EJBImplementationContainer.java:126)
         at com.sap.engine.services.webservices.runtime.RuntimeProcessor.process(RuntimeProcessor.java:157)
         at com.sap.engine.services.webservices.runtime.RuntimeProcessor.process(RuntimeProcessor.java:79)
         at com.sap.engine.services.webservices.runtime.servlet.ServletDispatcherImpl.doPost(ServletDispatcherImpl.java:92)
         at SoapServlet.doPost(SoapServlet.java:51)
         at javax.servlet.http.HttpServlet.service(HttpServlet.java:760)
         at javax.servlet.http.HttpServlet.service(HttpServlet.java:853)
         at com.sap.engine.services.servlets_jsp.server.HttpHandlerImpl.runServlet(HttpHandlerImpl.java:401)
         at com.sap.engine.services.servlets_jsp.server.HttpHandlerImpl.handleRequest(HttpHandlerImpl.java:266)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.startServlet(RequestAnalizer.java:387)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.startServlet(RequestAnalizer.java:365)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.invokeWebContainer(RequestAnalizer.java:944)
         at com.sap.engine.services.httpserver.server.RequestAnalizer.handle(RequestAnalizer.java:266)
         at com.sap.engine.services.httpserver.server.Client.handle(Client.java:95)
         at com.sap.engine.services.httpserver.server.Processor.request(Processor.java:175)
         at com.sap.engine.core.service630.context.cluster.session.ApplicationSessionMessageListener.process(ApplicationSessionMessageListener.java:33)
         at com.sap.engine.core.cluster.impl6.session.MessageRunner.run(MessageRunner.java:41)
         at com.sap.engine.core.thread.impl3.ActionObject.run(ActionObject.java:37)
         at java.security.AccessController.doPrivileged(Native Method)
         at com.sap.engine.core.thread.impl3.SingleThread.execute(SingleThread.java:100)
         at com.sap.engine.core.thread.impl3.SingleThread.run(SingleThread.java:170)
How do I solve this one?

    Hi, vanaja,
    "inserted value too large for column" - this is the reason for the exception. That is, you are trying to persist a value that exceeds the table column capacity.
What you can do is 1) reduce the length of the data element you want to update, or 2) first increase the relevant column's maximum length and then try to update the table again.
    If you choose 2), and if you are using the system database, then you can use Java Dictionary and follow [THIS|http://help.sap.com/saphelp_nw70/helpdata/en/fe/53fb40f17af66fe10000000a1550b0/frameset.htm] procedure to increase the maximum length of a column.
    Hope that helps!
    Regards,
    Yordan
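If the table is a plain database table maintained outside the Java Dictionary, option 2) can also be done with a single DDL statement straight against the Oracle schema. A hedged example (the table and column names below are made up for illustration; use the actual ones behind the failing entity bean):
-- widen the column so the longer value fits (hypothetical table/column names)
ALTER TABLE user_preferences MODIFY (user_profile_id VARCHAR2(100));
After widening the column, retry the update; ORA-01401 simply means the supplied value is longer than the column's declared size.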

  • Error while updating order - even after using recommended format

Used the below while updating the order:
1) Transaction
2) Synchronization
Still getting the below error:
    Caused by: atg.service.pipeline.RunProcessException: An exception was thrown from the context of the link named [updatePriceInfoObjects].
            at atg.service.pipeline.PipelineChain.runProcess(PipelineChain.java:393)
            at atg.service.pipeline.PipelineChainContext.runProcess(PipelineChainContext.java:207)
            at atg.service.pipeline.PipelineManager.runProcess(PipelineManager.java:475)
            at atg.commerce.pipeline.CommercePipelineManager.runProcess(CommercePipelineManager.java:123)
            at atg.commerce.order.OrderManager.updateOrder(OrderManager.java:2905)
            ... 64 common frames omitted
    Caused by: atg.commerce.CommerceException: Saving order 818320012 failed because doing so would result in data being overwritten. This save attempt had an out of date repository item [pricingAdjustment].
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.writeProperties(ProcSavePriceInfoObjects.java:1428)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.savePricingAdjustments(ProcSavePriceInfoObjects.java:1316)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.saveSubtotalPriceInfos(ProcSavePriceInfoObjects.java:1185)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.saveShippingItemsSubtotalPriceInfos(ProcSavePriceInfoObjects.java:1076)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.savePriceInfo(ProcSavePriceInfoObjects.java:1052)
            at atg.projects.store.order.processor.StoreProcSavePriceInfoObjects.savePriceInfo(StoreProcSavePriceInfoObjects.java:101)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.saveOrderPriceInfo(ProcSavePriceInfoObjects.java:807)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.runProcess(ProcSavePriceInfoObjects.java:716)
            at atg.service.pipeline.PipelineLink.runProcess(PipelineLink.java:255)
            at atg.service.pipeline.PipelineChain.runProcess(PipelineChain.java:365)
            ... 68 common frames omitted
    Caused by: atg.repository.ConcurrentUpdateException: no rows updated oldVersion=1 for item=pricingAdjustment:pa5090009 in GSATransaction=atg.adapter.gsa.GSATransaction@2c6042ec    thread=[ACTIVE] ExecuteThread: '5' for queue: 'weblogic.kernel.Default (self-tuning)' transaction=Xid=BEA1-439B3F1AD2C4DD758FA4(744502646),Status=Active,numRepliesOwedMe=0,numRepliesOwedOthers=0,seconds since begin=0,seconds left=3600,activeThread=Thread[[ACTIVE] ExecuteThread: '5' for queue: 'weblogic.kernel.Default (self-tuning)',5,Pooled Threads],XAServerResourceInfo[ATGProductionDS_atgdv02g3]=(ServerResourceInfo[ATGProductionDS_atgdv02g3]=(state=started,assigned=none),xar=ATGProductionDS,re-Registered = false),SCInfo[atgdv02g3+ss_ps01]=(state=active),local properties=({weblogic.jdbc.jta.ATGProductionDS=[autoCommit=true,enabled=true,isXA=true,isJTS=false,vendorID=0,connUsed=true,doInit=false,'null',destroyed=false,poolname=ATGProductionDS,appname=null,moduleName=null,connectTime=38,dirtyIsolationLevel=false,initialIsolationLevel=2,infected=false,lastSuccessfulConnectionUse=1384308810784,secondsToTrustAnIdlePoolConnection=10,currentUser=null,currentThread=null,lastUser=null,currentError=null,currentErrorTimestamp=null,JDBC4Runtime=true,supportStatementPoolable=true,needRestoreClientInfo=false,defaultClientInfo={},supportIsValid=true]}),OwnerTransactionManager=ServerTM[ServerCoordinatorDescriptor=(CoordinatorURL=ss_ps01+devrdatgap138.dev.nintendo.com:10180+atgdv02g3+t3+, XAResources={ATGPublishingDS_atgdv02g3, WLStore_atgdv02g3__WLS_ss_ps01, ATGSwitchingDS_A_atgdv02g3, ATGSwitchingDS_B_atgdv02g3, WSATGatewayRM_ss_ps01_atgdv02g3, ATGAgentDS_atgdv02g3, ATGProductionDS_atgdv02g3},NonXAResources={})],CoordinatorURL=ss_ps01+devrdatgap138.dev.nintendo.com:10180+atgdv02g3+t3+)
            at atg.adapter.gsa.GSAItemDescriptor.updateItem(GSAItemDescriptor.java:7507)
            at atg.adapter.gsa.GSARepository.updateItem(GSARepository.java:1075)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.writeProperties(ProcSavePriceInfoObjects.java:1424)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.savePricingAdjustments(ProcSavePriceInfoObjects.java:1316)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.saveSubtotalPriceInfos(ProcSavePriceInfoObjects.java:1187)
            at atg.commerce.order.processor.ProcSavePriceInfoObjects.saveShippingItemsSubtotalPriceInfos(ProcSavePriceInfoObjects.java:1077)
            ... 74 common frames omitted

Update your order with the pattern suggested by Devon; here is the link:
Design Pattern for Updating an ATG Order | Devon Hillard's Digital Sanctuary

  • How to select and duplicate the records and update some column values using cursor

I have a table with 920 records. We need to update the end date to 6/30/2014 for all 920 records, and I need to create 920 new records with a start date of 7/1/2014 and the external value updated to CCC.
Note: the table primary key is not auto-increment, but I have a stored procedure to get the latest key for that.
Existing table:
ID   Source Name   Internal value   External value   Start date   End date
1    XXX           AAA              BBB              1/1/2013     6/30/2015
Create new records:
ID   Source Name   Internal value   External value   Start date   End date
921  XXX           AAA              CCC              7/1/2013     12/30/2015

    Hi ManuGT
    If I understand what you need then you ask for:
    1. updating all current rows (920 rows in the table now)
2. insert new rows which are duplicates of the previous rows, but with the value 'CCC' instead of 'BBB'
If so, there is no reason to use a cursor, and it is highly NOT RECOMMENDED to use any type of loop.
    You should work with SET and do it all in 2 simple queries:
    -- first we duplicate the existing rows,
    -- but we use the values 'CCC" and '20140107' for the new rows values
    INSERT test (SourceName, InternalValue, ExternalValue, StartDate, EndDate)
    select SourceName, 'CCC', ExternalValue, '20140107' , EndDate
    from test
    where
    -- You can use any filter that you need if you dont want to update all rows
    InternalValue = 'AAA' and ExternalValue = 'BBB' and StartDate = '20140101' and EndDate = '20140630'
-- Now we update the old rows (check the filter! I get only the old rows since I filter the new rows out)
    UPDATE test
    SET EndDate = '20140107' -- I use date in format yyyymmdd, You can use other formats as well
    where
    -- You can use any filter that you need if you dont want to update all rows
    InternalValue = 'AAA' and ExternalValue = 'BBB' and StartDate = '20140101' and EndDate = '20140630'
Unfortunately you did not post DDL+DML! Therefore we can't see your table structure and the data sample, and we can only guess. I used Saeid's post as the basis for the DDL+DML.
    Please next time post DDL+DML
    here is the full code with the DDL+DML that i used:
    -- This is our DDL - A create table query:
    create table test
    ( id int identity(1,1) primary key,
    SourceName nvarchar(3),
    InternalValue nvarchar(3),
    ExternalValue nvarchar(3),
    StartDate date,
EndDate date
)
go
    -- This is our DML - A query that insert some sample data
    declare @i int = 1 ;
    while @i < 921
    begin
    insert test (SourceName, InternalValue, ExternalValue, StartDate, EndDate)
    values ('XXX', 'AAA', 'BBB', '1/1/2014', '6/30/2014' ) ;
    set @i += 1 ;
    end ;
    GO
    -- Here is the solution for the problem as I understood your needs:
    -- first we duplicate the existing rows,
    -- but we use the values 'CCC" and '20140107' for the new rows values
    INSERT test (SourceName, InternalValue, ExternalValue, StartDate, EndDate)
    select SourceName, 'CCC', ExternalValue, '20140107' , EndDate
    from test
    where
    -- You can use any filter that you need if you dont want to update all rows
    InternalValue = 'AAA' and ExternalValue = 'BBB' and StartDate = '20140101' and EndDate = '20140630'
-- Now we update the old rows (check the filter! I get only the old rows since I filter the new rows out)
    UPDATE test
    SET EndDate = '20140107' -- I use date in format yyyymmdd, You can use other formats as well
    where
    -- You can use any filter that you need if you dont want to update all rows
    InternalValue = 'AAA' and ExternalValue = 'BBB' and StartDate = '20140101' and EndDate = '20140630'
-- Here we just check what the result looks like :-)
    select *
    from test ;
-- And since we do not really need this table in our server... Here we clean up the DDL (you probably DO NOT WANT TO EXECUTE THIS!)
    DROP table test
    GO
    I hope this was useful :-)

  • Error while updating the forecast value in M.Master through RMDATIND

    Hi All
    Good Day
In my custom program I am using the structures bgr00, bmm00, bmmh1 & bmmh3, and through the rmdatind (direct input method) program I am trying to update the forecast value in the material master data.
I am getting the error message "Specify a tracking limit", even though I have set the tracking limit to 4 in my program.
Actually there are 2 fields for the tracking limit in structure bmmh1:
1. SIGGR and
2. SIGG2
I have given the input 4 for both fields, but I am getting the same error message, "Specify a tracking limit".
    Can anyone please help me in this case.
    Thanks in advance.
    Cheers
    Vallabhaneni

Debug your program.
Compare the content of the field value you submitted with the content of the field that is checked when message M3 456 is issued.

Problem while updating the forecast value in m.master by direct input method

    Hi All
      Good Day
      I am updating the forecast value in material master by direct input method.
My program works fine when I run it online,
but when I run it in the background I get the error mentioned below:
Job ZM_OPTIMIZA_UPLOAD: Data does not match the job definition; job terminated    BD    078     E
    Please anyone can help ne in above case.
    Thanks in advance.
    Cheers
    Vallabhaneni

    Hi Hakim
    How are you
    Thanks for your valuable suggestion.
I am executing my program in the background via transaction BMV0 with a variant. In development I can run this job in the background without any logical errors, but in QA I am getting errors.
Can you please help me in the above case?
In BMV0 I first defined the job; on the rmdatind (execute) screen I initially set the max. no. of logical errors to 500 and the transactions per commit unit to 500, ran the program in the background from BMV0, and got the message "background job terminated immediately".
Because of this I changed the max. no. of logical errors from 500 to 99999 and the transactions per commit unit from 500 to 999999, and then scheduled my program in the background again.
It ran for almost 27 hours and then gave the message "background job terminated".
    Job log entries are as follows.
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040840; material number SUJ82J8400; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040840: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040841; material number SUJ84G3001; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040841: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040842; material number SUJ84G3002; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040842: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040843; material number SUJ86G0100; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040843: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040844; material number SUJ86G0300; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040844: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040845; material number SUJ86G0900; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040845: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040846; material number SUJ86G9600; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040846: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040847; material number SUJ86G9601; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040847: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:19:46  Transaction 0000040848; material number SUJ86J0000; error gravity E       
    22.12.2008  11:19:46  Transaction 0000040848: Taxes for sales org.  cannot be transferred       
    22.12.2008  11:19:46  A system error has occurred while locking                                 
    22.12.2008  11:20:54  Transaction 0000040848 completed Mat. no.  No. of log. errors 0000040848  
    22.12.2008  11:21:30  ABAP/4 processor: TSV_TNEW_PAGE_ALLOC_FAILED                              
    22.12.2008  11:21:30  Job cancelled      
I know it has taken a long time to process the job because I changed the transactions per commit unit (500 to 999999), which kills the system.
But I am supposed to provide some number < 999999 for the transactions per commit unit;
actually I haven't tried a value < 999999 as mentioned above, because I don't have much hope.
    Please look at this problem and help me in this case.
    Thanks in advance.
    Vallabhaneni

  • Runtime exception while updating data

    Hi All,
I'm getting a runtime exception while trying to update data to a cube. It is a full load and the error is:
    "A RAISE statement in the program "SAPLSDIFRUNTIME" raised the exception
    condition "NOT_FOUND".
    Since the exception was not intercepted by a superior
    program, processing was terminated."
    The runtime exception is "RAISE_EXCEPTION".
Can anyone please help me understand it and how to overcome it?
    Thanks in advance,
    Sananda

    Hi,
    Are you trying to load from DSO->Cube? or PSA->Cube?
Whatever the case, take an ABAPer's help and analyze the dump. You will get a clue.
    Regards,
    Suman

  • Update item characteristic values using BAPI_SALESORDER_CHANGE

    Hi Experts,
    Could anyone give me sample code on how to update the characteristic values of a sales order item? Points will be awarded... Thanks!
    Regards,
    LM

    Just checked on OSS note 549563 regarding this. This is resolved. Thanks

Exception while processing Excel after upload using Commons FileUpload

    Hi all,
I am experiencing a problem while creating a workbook after getting the input stream from the uploaded file. It's not going to the catch block; instead it goes to finally and gives a NullPointerException there, as one variable used in finally is not defined. The variable is set in try as well as catch, but at run time it is not getting assigned any value. I'll put the code here; please help me with a solution.
    import org.w3c.dom.* ;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import jxl.*;
    import org.apache.commons.fileupload.*;
    import org.apache.commons.fileupload.disk.*;
    import org.apache.commons.fileupload.servlet.*;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;
    import org.apache.poi.hssf.usermodel.HSSFCell;
    import org.apache.poi.hssf.usermodel.HSSFSheet;
    import org.apache.poi.hssf.usermodel.HSSFWorkbook;
    import org.apache.poi.hssf.usermodel.HSSFRow;
    public class CescoreUploadServlet extends baseHttpServlet
         private DataSource cesDS = null;
         public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
              doPost(req, res);
         public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
              String targetPage = null;
              File f = null;
              System.out.println("Upload Controller");
              HttpSession session = req.getSession(true);
              try
                   if(cesDS == null){
                        cesDS = new JNDIDataSource(getServletContext().getInitParameter(Constants.DATA_SOURCE_NAME));
                   CesRepository cRep = new CesRepository(cesDS);
                   if (session.getAttribute("DataContainerInfo") == null) {
                        System.out.println("Initializing DataContainerInfo");
                        DataContainer DataContainer = new DataContainer();
                        cRep.setInitialParameters(DataContainer);
                        session.setAttribute("DataContainerInfo",DataContainer);
                   else System.out.println("DataContainerInfo is available");
                   UserInfo userInfo = null;               
                   String login_id = req.getRemoteUser();
                   if(session.getAttribute("UserID") != null) login_id = (String)session.getAttribute("UserID");
                   if(session.getAttribute("userProfile") == null ) session.setAttribute("userProfile", cRep.getUserInfo(login_id));
                   userInfo = (UserInfo)session.getAttribute("userProfile");
                   System.out.println("<<<<<< userInfo contains : "+userInfo.getHrID()+" >>>>>>");
                   String projIdValue = null;
                   String msg = null;
                   boolean isMultipart = FileUpload.isMultipartContent(req);
                   if(isMultipart){
                        System.out.println("is MultiPart");
                        DiskFileUpload upload = new DiskFileUpload();
                        List fileList = upload.parseRequest(req);
                        InputStream uploadedFileStream = null;
                        String uploadedFileName = null;
                        ArrayList impArray = new ArrayList();
                        Iterator iter = fileList.iterator();
                        while (iter.hasNext()) {
                             System.out.println("inside while");
                             FileItem item = (FileItem) iter.next();
                             if (!item.isFormField()) {
                                  System.out.println("item is not form field");
                                  if (item.getSize() < 1)
                                       throw new Exception("No file was uploaded");
                                  else
                                       uploadedFileName = item.getName();
                                       System.out.println("uploaded file name "+uploadedFileName);
                                       System.out.println("uploaded file size is "+item.getSize());
                                       uploadedFileStream = item.getInputStream();
                                       System.out.println("uploaded input stream available size is "+uploadedFileStream.available());
                             else
                                  System.out.println("item is form field");
                                  String key     = item.getFieldName();
                                  String value = item.getString();
                                  System.out.println("key is"+key);
                                  System.out.println("value is"+value);
                                  if(key.equals("projectId2")){
                                       projIdValue = value;
                        System.out.println("outside while");
                        POIFSFileSystem fs = new POIFSFileSystem(uploadedFileStream);
                        System.out.println("got POIFSFileSystem");//this is been printed in logs
                        HSSFWorkbook wb = new HSSFWorkbook(fs);//it is breaking over here
                        System.out.println("got HSSFWorkbook");//this is not been printed in logs
                        HSSFSheet sheet = wb.getSheetAt(0);
                        System.out.println("got HSSFSheet");
                        Iterator rows = sheet.rowIterator();
                        if(rows.hasNext()){
                        while( rows.hasNext() ) {
                             System.out.println("rows iteration");
                             HSSFRow row = (HSSFRow) rows.next();
                             Iterator cells = row.cellIterator();
                             while( cells.hasNext() ) {
                                  System.out.println("cell iteration");
                                  HSSFCell cell = (HSSFCell) cells.next();
                                  HashMap hm = new HashMap();//if everything is fine i'll use this hashmap to store values
                             System.out.println("CES UPLOAD.SERVLET. After adding");
                             msg = "Attendees have been added successfully";
                             req.setAttribute("msgText", msg);
                             targetPage = "/ces_disp.jsp";
                        else
                             throw new Exception("The Excel Sheet Uploaded has no entries. Please check and try again");
                   else{
                        throw new Exception("The Form is not Multipart");
              catch (Exception e)
                   System.out.println("CES UPLOAD.SERVLET.EXCEPTION ::: Exception");
                   targetPage = "/ces_disp.jsp";
                   if(e != null) req.setAttribute("msgText", e.getMessage());
                   else req.setAttribute(Constants.EXCEPTION_ATTR_NAME, new Exception("Unknown Exception"));
                   e.printStackTrace();
              finally{
                   System.out.println("CES UPLOAD.SERVLET. ::: Finally");
                   ServletContext stx = getServletConfig().getServletContext();
               RequestDispatcher dispatcher = stx.getRequestDispatcher(targetPage);
                   dispatcher.forward(req, res);
Message was edited by: Noufal_k

     import org.w3c.dom.*;
     import java.io.FileInputStream;
     import java.io.IOException;
     import java.io.InputStream;
     import java.util.HashMap;
     import java.util.Iterator;
     import java.util.List;
     import javax.servlet.RequestDispatcher;
     import javax.servlet.ServletContext;
     import javax.servlet.ServletException;
     import javax.servlet.http.HttpServletRequest;
     import javax.servlet.http.HttpServletResponse;
     import javax.servlet.http.HttpSession;
     import javax.sql.DataSource;
     import jxl.*;
     import org.apache.commons.fileupload.*;
     import org.apache.commons.fileupload.disk.*;
     import org.apache.commons.fileupload.servlet.*;
     import org.apache.poi.poifs.filesystem.POIFSFileSystem;
     import org.apache.poi.hssf.usermodel.HSSFCell;
     import org.apache.poi.hssf.usermodel.HSSFSheet;
     import org.apache.poi.hssf.usermodel.HSSFWorkbook;
     import org.apache.poi.hssf.usermodel.HSSFRow;

     public class CescoreUploadServlet extends baseHttpServlet {
          private DataSource cesDS = null;

          public void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
               doPost(req, res);
          }

          public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
               // including only relevant code
               String targetPage = null;
               System.out.println("Upload Controller");
               HttpSession session = req.getSession(true);
               try {
                    String projIdValue = null;
                    String msg = null;
                    boolean isMultipart = FileUpload.isMultipartContent(req);
                    if (isMultipart) {
                         System.out.println("is MultiPart");
                         DiskFileUpload upload = new DiskFileUpload();
                         List fileList = upload.parseRequest(req);
                         InputStream uploadedFileStream = null;
                         String uploadedFileName = null;
                         Iterator iter = fileList.iterator();
                         while (iter.hasNext()) {
                              System.out.println("inside while");
                              FileItem item = (FileItem) iter.next();
                              if (!item.isFormField()) {
                                   System.out.println("item is not form field");
                                   if (item.getSize() < 1) {
                                        throw new Exception("No file was uploaded");
                                   } else {
                                        uploadedFileName = item.getName();
                                        System.out.println("uploaded file name " + uploadedFileName); // printing c:/excelsheets/fileToUpload.xls
                                        System.out.println("uploaded file size is " + item.getSize()); // printing size is 15872
                                        uploadedFileStream = item.getInputStream();
                                        System.out.println("uploaded input stream available size is " + uploadedFileStream.available()); // printing available input stream size is 15872
                                   }
                              } else {
                                   System.out.println("item is form field");
                                   String key = item.getFieldName();
                                   String value = item.getString();
                                   System.out.println("key is " + key);
                                   System.out.println("value is " + value);
                                   if (key.equals("projectId2")) {
                                        projIdValue = value;
                                   }
                              }
                         }
                         System.out.println("outside while");
                         POIFSFileSystem fs = new POIFSFileSystem(uploadedFileStream);
                         System.out.println("got POIFSFileSystem"); // this is printed in the logs
                         HSSFWorkbook wb = new HSSFWorkbook(fs); // it is breaking here
                         System.out.println("got HSSFWorkbook"); // this is not printed in the logs
                         HSSFSheet sheet = wb.getSheetAt(0);
                         System.out.println("got HSSFSheet");
                         Iterator rows = sheet.rowIterator();
                         if (rows.hasNext()) {
                              while (rows.hasNext()) {
                                   System.out.println("rows iteration");
                                   HSSFRow row = (HSSFRow) rows.next();
                                   Iterator cells = row.cellIterator();
                                   while (cells.hasNext()) {
                                        System.out.println("cell iteration");
                                        HSSFCell cell = (HSSFCell) cells.next();
                                        HashMap hm = new HashMap(); // if everything is fine I'll use this HashMap to store values
                                   }
                              }
                              System.out.println("CES UPLOAD.SERVLET. After adding");
                              msg = "Attendees have been added successfully";
                              req.setAttribute("msgText", msg);
                              targetPage = "/ces_disp.jsp";
                         } else {
                              throw new Exception("The Excel Sheet Uploaded has no entries. Please check and try again");
                         }
                    } else {
                         throw new Exception("The Form is not Multipart");
                    }
               } catch (Exception e) {
                    System.out.println("CES UPLOAD.SERVLET.EXCEPTION ::: Exception");
                    targetPage = "/ces_disp.jsp";
                    if (e != null) req.setAttribute("msgText", e.getMessage());
                    else req.setAttribute(Constants.EXCEPTION_ATTR_NAME, new Exception("Unknown Exception"));
                    e.printStackTrace();
               } finally {
                    System.out.println("CES UPLOAD.SERVLET. ::: Finally");
                    ServletContext stx = getServletConfig().getServletContext();
                    RequestDispatcher dispatcher = stx.getRequestDispatcher(targetPage); // throwing a NullPointerException for this line
                    dispatcher.forward(req, res);
               }
          }
     }
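     One detail worth noting about the two symptoms above (a guess, not a confirmed diagnosis): the catch block only handles Exception, so if new HSSFWorkbook(fs) throws an Error such as OutOfMemoryError, targetPage is never assigned and the finally block ends up calling getRequestDispatcher(null), which would produce exactly the NullPointerException reported on that line. Below is a minimal defensive sketch; the helper name parseAndForward is made up for illustration, and only the standard servlet and POI calls are assumed.
     import java.io.IOException;
     import java.io.InputStream;
     import javax.servlet.RequestDispatcher;
     import javax.servlet.ServletException;
     import javax.servlet.http.HttpServletRequest;
     import javax.servlet.http.HttpServletResponse;
     import org.apache.poi.hssf.usermodel.HSSFWorkbook;
     import org.apache.poi.poifs.filesystem.POIFSFileSystem;

     public class UploadSketch {
          // Hypothetical helper, not the original servlet code: open the workbook,
          // log whatever is actually thrown, and always forward to a non-null page.
          void parseAndForward(InputStream uploadedFileStream,
                               HttpServletRequest req, HttpServletResponse res)
                    throws ServletException, IOException {
               String targetPage = "/ces_disp.jsp";          // default, so finally never sees null
               try {
                    POIFSFileSystem fs = new POIFSFileSystem(uploadedFileStream);
                    HSSFWorkbook wb = new HSSFWorkbook(fs);  // the call that is "breaking" above
                    // ... row/cell iteration as in the listing above ...
                    req.setAttribute("msgText", "Attendees have been added successfully");
               } catch (Throwable t) {                       // Throwable also catches Errors, not just Exceptions
                    t.printStackTrace();
                    req.setAttribute("msgText", String.valueOf(t.getMessage()));
               } finally {
                    RequestDispatcher dispatcher = req.getRequestDispatcher(targetPage);
                    dispatcher.forward(req, res);
               }
          }
     }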

  • Email Clob values using zip.

    I have a table cms_reports. The table structure is: group_id (NUMBER), file (CLOB), filename (VARCHAR2).
    group_id   file (CLOB)     filename (VARCHAR2)
    1          clobdata(1)     file1
    1          clobdata(2)     file2
    1          clobdata(3)     file3
    1          clobdata(4)     file4
    2          clobdata(5)     file5
    2          clobdata(6)     file6
    I have to write a procedure that takes the group id as an IN parameter.
    If I pass 1 as the group id, the procedure should zip file1, file2, file3 and file4 together and then send an email.
    file1 should hold clobdata(1)
    file2 should hold clobdata(2)
    file3 should hold clobdata(3)
    file4 should hold clobdata(4)
    Is this possible, or does anyone have suggestions?

    Anton,
    I think I will convert all the CLOBs to BLOBs and then use your package to zip the multiple BLOBs into one zip file.
    I am using the code below to convert each CLOB to a BLOB before passing it to your package code. Am I doing the right thing?
    CREATE OR REPLACE FUNCTION f_clob_to_blob(p_clob IN CLOB) RETURN BLOB IS
      v_blob        BLOB;
      v_offset      NUMBER DEFAULT 1;
      v_amount      NUMBER DEFAULT 4096;
      v_offsetwrite NUMBER DEFAULT 1;
      v_amountwrite NUMBER;
      v_buffer      VARCHAR2(4096 CHAR);
    BEGIN
      dbms_lob.createtemporary(v_blob, TRUE);
      BEGIN
        LOOP
          -- read the next chunk of the CLOB; DBMS_LOB.READ raises NO_DATA_FOUND past the end
          dbms_lob.READ(p_clob, v_amount, v_offset, v_buffer);
          v_amountwrite := utl_raw.length(utl_raw.cast_to_raw(v_buffer));
          dbms_lob.WRITE(v_blob, v_amountwrite, v_offsetwrite, utl_raw.cast_to_raw(v_buffer));
          v_offsetwrite := v_offsetwrite + v_amountwrite;
          v_offset := v_offset + v_amount;
          v_amount := 4096;
        END LOOP;
      EXCEPTION
        WHEN no_data_found THEN
          NULL;
      END;
      RETURN v_blob;
    END f_clob_to_blob;
    /

  • Invalid Attribute value Exception while adding Timestamp Attribute value

    Good Day friends,
    I'm using IBM Directory Server 4.1 LDAP to maintain the online user registration for a web application. I need to store the timestamp at the time a user registers, so that another Java application of mine can delete non-active users after a specific amount of time. But when I provide a value for the "createTimestamp" attribute in LDAP using a java.sql.Timestamp object, I get an InvalidAttributeValueException. I tried inserting the entry with a String, but without success. What is the type or format for the createTimestamp attribute? How can I bind a time or date object to this attribute? Any help or suggestions will be highly appreciated. Thanks in advance. Cheers..!!
    The source code is as follows:
    public void createUserProfile(String afname, String alname, String aemail, String apassword) {
              String fname, lname, email, password;
              fname = afname;
              lname = alname;
              email = aemail;
              password = apassword;
              Date regDate = new Date();
              //java.sql.Time tsObj = new java.sql.Time(regDate.getTime());
              Timestamp tsObj = new Timestamp(regDate.getTime());
              String strTS = tsObj.toString();
              System.out.println("Created Date: "+tsObj);
              try {
                   InitialDirContext ctx = new InitialDirContext(prop);
                   BasicAttribute objClasses = new BasicAttribute("objectclass");
                   objClasses.add("inetOrgPerson");
                   BasicAttributes attrs = new BasicAttributes();
                   attrs.put(objClasses);
                   attrs.put("givenName", fname);
                   attrs.put("sn", lname);
                   attrs.put("mail", email);
                   attrs.put("userPassword", password);
                   attrs.put("createTimestamp", tsObj);
                   ctx.createSubcontext("cn="+fname+",ou=Dept,o=InterComp", attrs);
                   ctx.close();
               } catch (Exception e) {
                    System.out.println("Error : " + e.getMessage());
                    e.printStackTrace();
               }
          }
    The Error is as follows:
    Error : Malformed 'createTimestamp' attribute value
    javax.naming.directory.InvalidAttributeValueException: Malformed 'createTimestamp' attribute value; remaining name 'cn=arun15382,ou=Dept,o=InterComp'
         at com.sun.jndi.ldap.LdapClient.encodeAttribute(LdapClient.java:964)
         at com.sun.jndi.ldap.LdapClient.add(LdapClient.java:1012)
         at com.sun.jndi.ldap.LdapCtx.c_createSubcontext(LdapCtx.java:648)
         at com.sun.jndi.toolkit.ctx.ComponentDirContext.p_createSubcontext(ComponentDirContext.java:323)
         at com.sun.jndi.toolkit.ctx.PartialCompositeDirContext.createSubcontext(PartialCompositeDirContext.java:253)
         at com.sun.jndi.toolkit.ctx.PartialCompositeDirContext.createSubcontext(PartialCompositeDirContext.java:241)
         at javax.naming.directory.InitialDirContext.createSubcontext(InitialDirContext.java:180)

    I've the same problem with lastModifiedTime:
    java.sql.Timestamp cal = new java.sql.Timestamp(System.currentTimeMillis());
    myAttrs.put(new BasicAttribute("lastModifiedTime", cal));
    this results in:
    javax.naming.directory.InvalidAttributeValueException: Malformed 'lastModifiedTime' attribute value; remaining name 'uid=tester,ou=people,dc=...'
         at com.sun.jndi.ldap.LdapClient.encodeAttribute(LdapClient.java:1041)
         at com.sun.jndi.ldap.LdapClient.add(LdapClient.java:1089)
    Have you fixed it?
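    For anyone who hits this later: createTimestamp and modifyTimestamp are operational attributes that the directory server maintains itself, so most servers will not accept client-supplied values for them, and the JNDI LDAP provider only encodes attribute values that are String or byte[]. Putting a java.sql.Timestamp object into a BasicAttribute is what triggers the "Malformed ... attribute value" errors shown above. A small sketch of the usual workaround is below; it assumes you store the value in a writable attribute of your own (the name registrationTime is made up and would have to exist in your schema) as an LDAP GeneralizedTime string.
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class LdapTimeSketch {
         // Format a Date as an LDAP GeneralizedTime string, e.g. "20100630001306Z".
         public static String toGeneralizedTime(Date date) {
              SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMddHHmmss'Z'");
              fmt.setTimeZone(TimeZone.getTimeZone("UTC")); // GeneralizedTime values are in UTC
              return fmt.format(date);
         }

         public static void main(String[] args) {
              String ts = toGeneralizedTime(new Date());
              System.out.println(ts);
              // attrs.put("registrationTime", ts); // pass a String value, not a java.sql.Timestamp object
         }
    }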
