Daily dump by Autoabap - DATA_LENGTH_0

Hello,
We use our Flash recovery area as our archive log destination and since we started doing so we get this dump:
DATA_LENGTH_0 - CX_SY_RANGE_OUT_OF_BOUNDS
The problem occurs here (in RSORADBA):
perform get_db_parameter(rsora001) using 'log_archive_dest'
                                         archdir.
if archdir = space.
  perform get_db_parameter(rsora001) using 'log_archive_dest_1'
                                           archdir.
  if archdir cs 'LOCATION='.
    len = sy-fdpos + 9.
  else.
    len = 0.
  endif.
  move archdir+len to archdir1.
  if archdir1 ca ' '.
    len = sy-fdpos.
  else.
    len = strlen( archdir1 ).
  endif.
  move archdir1(len) to archdir.   " <=== this is where the dump is raised
endif.
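When both parameters come back empty, archdir1 is blank, the CA check hits at sy-fdpos = 0, len stays 0, and the final MOVE does a partial field access with length 0 - which is exactly what raises CX_SY_RANGE_OUT_OF_BOUNDS (DATA_LENGTH_0). Just to illustrate the mechanics, a minimal hedged sketch of a guard (not the official correction):

* Sketch only: a zero-length partial access is what triggers DATA_LENGTH_0.
if len > 0.
  move archdir1(len) to archdir.
else.
  clear archdir.   " no local archive destination configured
endif.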
Since log_archive_dest and log_archive_dest_1 are both empty, this will always happen until I disable this program.
My question is: how do I disable this program? Or how do I tell it to stop checking my archive destination? The same dump occurs when I press the "Archiving directory status" button in DB12.
Thank you!!
Br Linus Hellsing

Hi,
I got the same problem after upgrading to Oracle 11.2.
After reading some notes and some forum messages, I disabled the parameter log_archive_dest and set
the parameter log_archive_dest_1 as described in Note 1431798: alter system set log_archive_dest_1 = 'LOCATION=<SAPDATA_HOME>/oraarch/<sid>arch' SCOPE = SPFILE SID='*';
Now it works.
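If you want to double-check what the SAP monitor will now read, here is a small hedged sketch reusing the form quoted in the question (the width of the receiving field is an assumption):

* Sketch: read the Oracle parameter the same way RSORADBA does.
DATA lv_val(255) TYPE c.   " assumption: wide enough for the parameter value
PERFORM get_db_parameter(rsora001) USING 'log_archive_dest_1' lv_val.
WRITE: / 'log_archive_dest_1 =', lv_val.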
Kind Regards
Wolfgang

Similar Messages

  • Problem while changing Sales order using 'BAPI_SALESORDER_CHANGE'

    Hi all,
    Below is my code to update delivery block value:
    FORM call_bapi_salesorder_change .
    DATA:  iv_bapi_view  LIKE order_view.
    DATA: gt_sales_doc TYPE STANDARD TABLE OF sales_key, " Document Numbers to Be Selected
            gt_items TYPE STANDARD TABLE OF bapisdit. " Order Item Data for Document Numbers
    *--------------------------------------------------------------------*
    DATA:order_headers_out LIKE bapisdhd OCCURS 0 WITH HEADER LINE.
    DATA:order_header_inx LIKE bapisdh1x.
      DATA: lv_salesdocument LIKE bapivbeln-vbeln.
      data: lv_matnr(10) type c.
      DATA:
    gt_schdule TYPE STANDARD TABLE OF bapischdl, " for gl date & load date
    gt_schdulex TYPE STANDARD TABLE OF bapischdlx, " for partner role and partner function.
            gt_return TYPE STANDARD TABLE OF bapiret2,
            gt_itemin TYPE STANDARD TABLE OF bapisditm,
            gt_iteminx TYPE STANDARD TABLE OF bapisditmx ,
            gt_bapisdh1 TYPE STANDARD TABLE OF  bapisdh1.
      DATA:gs_head_bapi TYPE bapisdh1x,
           gs_schdule TYPE bapischdl,
           gs_schdulex TYPE bapischdlx,
           gs_return TYPE bapiret2,
           gs_itemin TYPE bapisditm,
           gs_iteminx TYPE bapisditmx,
           gs_bapisdh1 TYPE bapisdh1.
      CLEAR : gt_schdule,gt_schdulex,gs_return.
      REFRESH gt_return.
      gs_head_bapi-updateflag = 'U'.
    SORT gt_final BY vbeln.
    CLEAR gs_vbep.
    REFRESH gt_vbep[].
      CLEAR gs_final.
      LOOP AT gt_final1 INTO gs_final.
        gs_itemin-itm_number = gs_final-posnr.
    *   move gs_final-matnr to lv_matnr. "qx94162 - commented out, so lv_matnr is never filled
        gs_itemin-material = gs_final-matnr.
        CALL FUNCTION 'CONVERSION_EXIT_ALPHA_OUTPUT'
          EXPORTING
            input  = gs_final-matnr
          IMPORTING
            output = gs_final-matnr.
    *   note: lv_matnr is still empty at this point, so this converts an empty value
        CALL FUNCTION 'CONVERSION_EXIT_ALPHA_INPUT'
          EXPORTING
            input  = lv_matnr
          IMPORTING
            output = lv_matnr.
        gs_itemin-material = lv_matnr.
        gs_itemin-material = gs_final-matnr. " overwrites the previous assignment
        gs_iteminx-itm_number = gs_final-posnr.
        gs_iteminx-updateflag = 'U'. " changed to I
        gs_iteminx-material = 'X'.
        " gs_schdule-req_dlv_bl = gs_final-lifsp. " Delivery block
        gs_schdule-itm_number = gs_final-posnr.
        gs_schdulex-itm_number = gs_final-posnr.
        gs_schdule-sched_line = '0001'.
        gs_schdulex-sched_line = '0001'.
        gs_schdule-req_dlv_bl = gs_final-lifsp. " delivery block
        gs_schdulex-req_dlv_bl = 'X'. " Delivery block
        gs_schdulex-updateflag = 'U'.
        gs_bapisdh1-sales_org = gs_final-vkorg.
        APPEND gs_schdule TO gt_schdule.
        APPEND gs_schdulex TO gt_schdulex.
        APPEND gs_itemin TO gt_itemin.
        APPEND gs_iteminx TO gt_iteminx.
        APPEND gs_bapisdh1 TO gt_bapisdh1.
        lv_salesdocument = gs_final-vbeln.
        CALL FUNCTION 'BAPI_SALESORDER_CHANGE'
          EXPORTING
            salesdocument    = lv_salesdocument
            order_header_in  = gs_bapisdh1
            order_header_inx = gs_head_bapi
    *     optional parameters (SIMULATION, BEHAVE_WHEN_ERROR, ...) omitted
          TABLES
            return           = gt_return
            order_item_in    = gt_itemin
            order_item_inx   = gt_iteminx
            schedule_lines   = gt_schdule
            schedule_linesx  = gt_schdulex.
        CALL FUNCTION 'BAPI_TRANSACTION_COMMIT'.
        REFRESH: gt_schdule, gt_schdulex, gt_itemin , gt_iteminx.
        LOOP AT gt_return INTO gs_return.
          WRITE / gs_return-message .
        ENDLOOP .
      ENDLOOP.
    ENDFORM.                    " CALL_BAPI_SALESORDER_CHANGE
    When I use the material value 18342 with CONVERSION_EXIT_ALPHA_OUTPUT, I get:
    ORDER_HEADER_IN has been processed successfully
    Material  is not defined for sales org.A101,  distr.chan.00,  language DE
    Error in ITEM_IN 000030
    SCHEDULE_IN has been processed successfully
    The sales document is not yet complete: Edit data
    Sales document 5010000715 was not changed
    and when I use CONVERSION_EXIT_ALPHA_INPUT (000000000000018342), or when I change the value to 0018342 while debugging, I get the dump below:
    Runtime Errors         DATA_LENGTH_0
    Exception              CX_SY_RANGE_OUT_OF_BOUNDS
    Date and Time          21.10.2010 07:22:17
    Information on where terminated
         The termination occurred in the ABAP program "SAPLVBAK", in
         "MAP_VBAP_TO_BAPISDIT". The main program was "RS_TESTFRAME_CALL".
         In the source code, the termination point is in line 168
         of the (Include) program "LVBAK001".
         The termination was caused because the exception "CX_SY_RANGE_OUT_OF_BOUNDS"
         occurred in procedure "MAP_VBAP_TO_BAPISDIT" "(FORM)", but was neither
         handled locally nor declared in the RAISING clause of its signature.
         The procedure is in program "SAPLVBAK"; its source code begins in line
         99 of the (Include) program "LVBAK001".
    I get the same dump when executing the BAPI from SE37 with the same test data, but I am able to change the order through VA02.

    Hi,
    Have you searched for OSS notes? Note 1137897 looks related to your problem. It mentions a different BAPI, but I assume both BAPIs reuse the same routines, and therefore it might solve your problem as well.
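    One more thing visible in your listing: lv_matnr is never filled (the MOVE above the conversion calls is commented out), so CONVERSION_EXIT_ALPHA_INPUT converts an empty value. A minimal sketch of the presumably intended sequence, assuming gs_final-matnr holds the external-format number:

    DATA lv_matnr TYPE matnr.
    * Sketch: convert the external material number to internal format,
    * then pass the converted value to the BAPI item.
    CALL FUNCTION 'CONVERSION_EXIT_ALPHA_INPUT'
      EXPORTING
        input  = gs_final-matnr
      IMPORTING
        output = lv_matnr.
    gs_itemin-material = lv_matnr.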
    Cheers

  • Canon HF20: backup the MTS files (and later convert), or import in AIC?

    Camcorder: Canon HF20. FCE 4.01.
    I have filmed some clips with the 7 Mbps setting. The file size of the MTS files is fairly small, but when importing into FCE (or iMovie) the file size increases about 10 times (to about 60 Mbps). I found the answer in this post: http://discussions.apple.com/thread.jspa?messageID=10900327&#10900327
    I want to save all my video clips to my external drive, and I am tempted to just copy the MTS files to a folder and then convert them later when I want to edit the footage. Would this be possible, or do I have to import them into FCE now?
    Is it possible to copy the MTS files back to the camcorder at a later time (the entire file structure - the folder is called BDMV) and then import them into FCE?

    Yeah, make sure you copy the entire folder structure from the highest level and DON'T delete ANY files (like I did!)
    I spent the entire night last night, and the last week, trying to get compatibility of .MTS files shot on a Canon HFS11 into FCE 4 (FCE 4.0.1 to be exact).
    I shot 3 months' worth of footage on the HFS11 and would daily dump the AVCHD folders into date folders (i.e. 060110, 070110, etc.) on an external HDD.
    Since I had all the .MTS files off the camera, I didn't know if I could use Log and Transfer to bring them into FCE, but it is possible. As on other forums, many people suggested you had to have the folders exactly like they were on the camera. This is true; unless, of course, you have deleted some files, or any became corrupt, the structure isn't exactly the same. In my case I had deleted junk files using a fast .MTS viewer (called a Windows 7 machine!) without realising it was going to affect the 'import' process in FCE.
    SOLUTION
    I found that the files in the CLIPINF folder (.CLI files) were matched to the .MTS files in the STREAM folder. So if you delete any .MTS files, I guess Log and Transfer reads the INDEX.BDM file or the .CLI files and says "hang on, something is different". So I went through my .CLI files, found the names missing from the .MTS files, duplicated a colour bar clip I had made, and renamed it to the missing file names. Voila, FCE 4 could 'import' them.

  • Tracking Personas Use/Users??

    I am curious if there is a report that shows users who are executing a transaction within Personas vs. people who are doing the same transaction via standard SAP GUI? Is there a way to see who is logging in to Personas on a regular basis?
    Thanks for any input!
    Cheryl

    That's a really good question, and one I've been struggling to find an answer to as well. The best I've come up with is to take a daily dump of statistics from STAD, filtered to include just web traffic (task type H or T as appropriate). That gives individual lines for each transaction with user, date, time, and a lot more that can be ignored. This also includes webgui traffic, and I can't see an easy way of distinguishing Personas from webgui; we have no significant webgui usage, so that's not a problem for us.
    I take this data and feed it into Excel, and build pivot tables on it. Of course, I should use Lumira, but...
    That gives me things like users by transaction used, or users by date (screenshots not reproduced here).
    It is tedious: the STAD output format is a pain to get into Excel, it can't be automated, and the STAD data doesn't stick around long, so if you forget for a few days you lose it. I did this religiously for a few months after we went live, but I've got out of the habit now, which is why the data mentioned above is from February. I do wish there was a better way...
    Steve.

  • Re: Forte Gremlins -- Thanks

    We are using vF.2 running on NT 4.0.
    Here are some of the responses I got. In case anyone is interested.
    It looks like we aren't completely crazy.
    Regards,
    Ty
    Response #1
    You did not mention what platform or version of Forte you are running.
    We had similar problems with Forte version 2.D on Sun/Solaris using
    both the gemstone repository and objectivity. We have since moved to
    Forte version 2F and our repository problems are gone (knock wood).
    You may also want to call Forte tech support. They were helpful in
    helping resolve our problems(we had a corrupt environment at one
    point). We started doing twice daily dumps of a workspace that
    contained all projects. We first update and then export. This does
    prevent integration during the 20 minutes it takes for the update and
    export.
    Response #2
    We had the same problems (repository corrupted, code that disappeared...),
    but on VMS (first btree, then ctree). We found that the problem only
    arises when there is no space left on disk.
    Response #3
    Do you have a shadow repository?
    If yes, some data could exist in the
    shadow that was not backed up with the repository,
    especially if the shadow repository was detached to
    improve performance.
    Our problems were not related to a shadow repository.


  • Advice needed: is BDB a good fit for what I aim at?

    Hello everyone,
    I'm not a BDB user (yet), but I really think that the BDB library
    IS the perfect fit for my needs.
    I'm designing an application with a "tricky" part, that requires a very fast
    data storage/retrieval solution, mainly for writes (but for reads too).
    Here's a quick summary of this tricky part, that should at least use
    2 databases:
    - the first db will hold references to contents, with a few writes per hour
    (the references being "pushed" to it from a separate admin back end), but
    expected high numbers of reads
    - the second db will log requests and other events on the references
    contained in the first db: it is planned that, on average, one read from DB1
    will produce five times as much writes into DB2.
    To illustrate:
    DB1 => ~25 writes / ~100 000 reads per hour
    DB2 => ~500 000 writes / *(60?) reads per hour
    (*will explain about reads on DB2 later in this post)
    Reads and writes on both DBs are not linear: for 500 000 writes
    per hour, you could have the first 250 000 done within 20 minutes,
    for instance. There will be peaks of activity, and low-activity phases
    as well.
    That being said, do the BDB experts here think that BDB is a good fit for
    such a need? If so or if not, could you please let me know what makes you
    think what you think? Many thanks in advance.
    Now, about the "*(60?) reads per hour" for DB2: actually, data from DB2
    should be accessed in real time for reporting. As of now, here is what
    I think I should do to ensure a high write throughput and not
    miss any write in DB2 => once per minute another "DB2" is created that will
    now record new events. The "previous" DB2 is then dumped/exported into another
    database, which will then be queried for real-time (not exactly real-time,
    but up to five minutes is an acceptable delay) reporting.
    So, in my first approach, DB2 is "stopped" then dumped each minute to another
    DB (not necessarily BDB, by the way - the data could probably be re-structured
    another way into another kind of NoSQL storage to facilitate querying and
    retrieval from the admin back end), which would make 60 reads per hour (but
    "entire" reads, of the full db).
    The questions are:
    - do you think that renewing DB2 this often would improve or strain performance?
    - is BDB good and fast at doing massive dumps/exports? (OK: 500 000 entries per
    hour would make ~8300 entries per minute on average, so let's say that a dump's
    max size is 24 000 rows of data)
    - would it or not be better to read directly into the current DB2 as it is
    storing (intensively) new rows, which would then avoid the need to dump each
    minute and then provide more real-time features? (then would just need a daily
    dump, to archive the "old" data)
    Anyone who has had to face such questions already is welcome, as well as
    any BDB user who thinks they can help on this topic!
    Many thanks in advance for your advice and knowledge.
    Cheers,
    Jimshell

    Hi Ashok
    Many thanks for your fast reply again :)
    Ashok_Ora wrote:
    Great -- thanks for the clarification.
    Thank YOU, my first post was indeed a bit confusing, at least about the reads on DB2.
    Ashok_Ora wrote:
    Based on this information, it appears that you're generating about 12 GB/day into DB2, which is about a terabyte of data every 3 months. Here are some things to consider for ad-hoc querying of about 1 TB of data (which is not a small amount of data).
    That's right, this is quite a huge lot of data, and it will keep growing and growing... Although the main goal of the app is to achieve (almost) real-time reporting, it will also (potentially) need to compute data over different time ranges, including yearly ranges for instance - but in this case the real-time capabilities wouldn't be relevant, I guess: if you look at data over a year's span, you probably don't need it to be accurate at a daily interval, so this part of the app would probably only use the "very old" data (not the current day's data), whatever it is stored in...
    Ashok_Ora wrote:
    Query performance is dramatically improved by using indexes. On the other hand, indexing data during the insert operation is going to add some overhead to the insert - this will vary depending on how many fields you want to index (how many secondary indices you want to create). BDB automatically indexes the primary key. Generally, any approach that you consider for satisfying the reporting requirement will benefit from indexing the data.
    Thanks for pointing that out! I did envisage using indexes, but my concern was (and you guessed it) the expectable overhead. At this stage (but I may be wrong, this is just a study in progress that will also need proper tests and benchmarking), I plan to favour write speed over everything else, to ensure that all the incoming data is indeed stored, even if it is quite tough to handle in the primary stored form.
    I prefer to envisage (but again, it's not certain that this is the right way of doing it) very fast inserts, then possibly re-processing the data later, and (maybe? certainly?) elsewhere, in order to have it more "query friendly" and efficient for moderately complex queries for legible reports/charts.
    Ashok_Ora wrote:
    Here are some alternatives to consider for the reporting application:
    - Move the data to another system like MongoDB or CouchDB as you suggest and run the queries there. The obvious cost is the movement of data and maintaining two different repositories. You can implement the data movement in the way I suggested earlier (close "old" and open "new" periodically).
    This is pretty much in line with what I had in mind when posting my question here :).
    I found out in several benchmarks (there are not a lot, but I did find some ^^) that BDB, amongst others, is optimized for bulk queries, i.e. retrieving a whole lot of data at once is faster than, for instance, retrieving the same row n times. Is that right? I guess that this is tightly related to the configuration and the server's performance...
    The process would then feed data into a new "DB2" instance every 60 seconds, and "dumping"/merging the previous one into another DB (BDB or else), which would grow until some defined limit.
    Would the "old DB2" to "main, current archive" merge be a heavy/tricky process, according to you? Especially as the "archive" DB keeps growing - what would be a decent size limit to take into account? I guess that 1 TB for 3 months of data would be a bit big, wouldn't it?
    Ashok_Ora wrote:
    - Use BDB's SQL API to insert and read data in DB1 and DB2. You should be able to run ad-hoc queries using SQL. After doing some experiments, you might decide to add a few indices to the system. This approach eliminates the need to move the data and maintain separate repositories. It's simpler.
    I read a bit about it, and these are indeed very interesting capabilities - especially as I know how to write decent SQL statements.
    That would mean that DB2 could grow beyond just a 60-second time span - but would this growth alter the write throughput? I guess so... This will require proper tests, definitely.
    Now, I plan the "real" data (the "meaningfull part of the data"), except timestamps, to be stored in quite a "NoSQL" way (this term is "à la mode"...), say as JSON objects (or something close to it).
    This is why I envisaged MongoDB for instance as the DB layer for the reporting part, as it is able to query directly into JSON, with a specific way to handle "indexes" too. But I'm no MongoDB expert in any way, so I'm not sure at all, again, that it is a good fit (just as much as I'm not sure right know what the proper, most efficient approach is, at this stage).
    Ashok_Ora wrote:
    - Use the Oracle external table mechanism (Overview and how-to - http://docs.oracle.com/cd/B28359_01/server.111/b28319/et_concepts.htm) to query the data from Oracle database. Again, you don't need to move the data. You won't be able to create indices on the external tables. If you do want to move data from the BDB repository into Oracle DB, you can run a "insert into <oracle_table> select * from <external_table_in_DB2>;". As you know, Oracle database is excellent database for all sorts of applications, including complex reporting applications.
    This is VERY interesting. VERY.
    And Oracle DB is, as you say, a very powerful and flexible database for every kind of process.
    I'll look into the docs carefully, many thanks for pointing that out (again!) :)
    I have not yet decided whether the final application will be free or open source, but this will eventually be a real question. Right now I don't want to think about it; I just want to find the best technical solution(s) to achieve the best possible results.
    And BDB and Oracle DB are very serious competitors, definitely ;)
    Ashok_Ora wrote:
    Hope this was helpful. Let me know your thoughts.
    It definitely is very useful! It makes things clearer and allows me to get deeper into BDB (and Oracle as well, with your latest reply), and that's much appreciated. :)
    As I said, my primary goal is to ensure the highest write throughput - I cannot miss any incoming data, as there is no (easy/efficient) way to re-ask for what would be lost and to get it again while being sure it hadn't changed (the simple act of re-asking would induce data flaws, actually).
    So everything else (including reporting, stats, etc.) IS secondary, as long as what comes in is always stored for sure (almost) as soon as it comes in.
    This is why, in this context, "real" real-time is not really crucial, and it can be "1 minute delayed" real time (it could even be "5 minutes delayed", actually, but let's be a bit demanding ^^).
    Ashok_Ora wrote:
    Just out of curiosity, can you tell us some additional details about your application?
    Of course, I owe you a bit more details, as you have helped me a lot in my research/study :)
    The application is sort of a tracking service. It is primarily thought to serve the very specific needs of a client of mine: they have several applications that all use the same "contents". Those contents can be anything, text, HTML, images, whatever, and they need to know almost in real time what application (used by which external client/device) is requesting ressources, which ones, from where, in which locale/area and language, etc.
    Really a kind of "Google Analytics" stuff (which I pointed out at the very beginning, but they need something more specific, and, above all, they need to keep all the data with them, so GA is not a solution here).
    So, as you can guess, this is pretty much... big. On the paper, at least. Not sure if this will ever be implemented one day, to be honest with you, but I really want to do the technical study seriously and bring the best options so that they know where they plan to go.
    As of me, I would definitely love it if this could become reality, this is very interesting and exciting stuff. Especially as it requires to see things as they are and not to fall into the "NoSQL fashion" for the sake of being "cool". I don't want a cool application, I want an efficient one, that fits the needs ;) What is very interesting here is that BDB is not new at all, though it's one of the most serious identified players so far!
    Ashok_Ora wrote:
    Thanks and warm regards.
    ashok
    Many thanks again, Ashok!
    I'll leave this question opened, in order to keep on posting as I'm progressing (and to be able to get your thoughts and rewarding comments and advice above all :) )
    Cheers,
    Jimshell

  • "DATA_LENGTH_0" Short dump

    Hi SAP experts!
    We would appreciate it a lot if you could help us with your expertise to solve a short dump when loading data.
    We are trying to load master data for one object; when we load only to the PSA, everything is fine. However, when we continue to its data target, we get a "DATA_LENGTH_0" short dump, making it impossible for us to continue.
    The information that the system gives us is just below.
    Thank you very much in advance for your support!
    Error analysis
        An exception occurred. This exception is dealt with in more detail below.
        The exception, which is assigned to the class 'CX_SY_RANGE_OUT_OF_BOUNDS',
        was neither caught nor passed along using a RAISING clause in the
        procedure "ROUTINE_9998" "(FORM)".
        Since the caller of the procedure could not have expected this exception
        to occur, the running program was terminated.
        The reason for the exception is:
        In program "GP40TYLPX0VKPP4E1JQZ7JCWSKY" the system tried to access field
        "LV_NODENAME" with length 0.
        Partial field access to fields with length 0 is not allowed, however.
    How to correct the error
        Use a positive length value when accessing the partial field
        "LV_NODENAME".
        If the error occurred in one of your own programs or in an SAP program
        that you modified, try to correct it yourself.
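    For reference, the failing statement in the generated program is presumably a partial access of the form lv_nodename(len) with len = 0. A hedged sketch of the usual guard in a transfer/update routine (RESULT as the routine's output field is an assumption):

    * Sketch: only do the partial access when the source field is non-empty.
    DATA lv_len TYPE i.
    lv_len = strlen( lv_nodename ).
    IF lv_len > 0.
      result = lv_nodename(lv_len).
    ELSE.
      CLEAR result.
    ENDIF.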

    Thank you very much, Santosh!
    That note recommends installing Support Package 14, but we already have SP 21 installed.
    Any other idea?
    Thank you very much!

  • FOTV Dump  DATA_LENGTH_0

    Hello everybody,
    has anyone encountered the dump  DATA_LENGTH_0 in FOTV the last days? (extract see below / full dump see attachment)
    Since yesterday, two of our customers have been facing this issue, and I'm afraid more may follow. Last month everything worked well for them.
    They have different ERP releases - 7.40 and 7.00 - but I think similar SP levels, considering the time the SPs were released (7.40 Basis SP 4, 7.00 Basis SP 29).
    They use the transaction FOTV to transfer data to the tax authority as follows:
    FOTV
    enter details/select variant --> choose Display Data Only --> execute
    then select entry to transfer and go to transfer data
    popup window is shown and you're asked if you F I N A L L Y want to transfer the data to the tax authority --> YES
    ---> Dump
    DATA_LENGTH_0
    Category              ABAP Programming Error
    Runtime Errors        DATA_LENGTH_0
    Except.                CX_SY_RANGE_OUT_OF_BOUNDS
    ABAP Program          CL_IM_FOT_VAT_DECL_BC_DE======CP
    Application Component  FI
    Information on where terminated
        The termination occurred in ABAP program "CL_IM_FOT_VAT_DECL_BC_DE======CP",
        in "IF_EX_FOT_VAT_BC~SEND_DECLARATION_BC". The main program
        was "FOT_B2A_ADMIN                          ".
        In the source code, the termination point is in line 256 of (Include)
        program "CL_IM_FOT_VAT_DECL_BC_DE======CM001".
        The termination is due to exception "CX_SY_RANGE_OUT_OF_BOUNDS" occurring in
        procedure "IF_EX_FOT_VAT_BC~SEND_DECLARATION_BC" "(METHOD)".
        This exception was not handled locally or declared in the RAISING
        clause in the procedure's signature however.
        The procedure is in program "CL_IM_FOT_VAT_DECL_BC_DE======CP        ". The
        source code begins in line 1 of
        (include) program "CL_IM_FOT_VAT_DECL_BC_DE======CM001    ".
    The thing is that these are two pretty different customers - with different releases, on different platforms, with different databases...
    Do any of you face similar problems? Or does anybody have an idea about the root cause of this problem?
    We couldn't find any SAP Note or hint that could help with this case, and before we create an SAP customer message I wanted to ask around a bit.
    Thanks in advance for sharing your ideas.
    Marie

    Well - SAP just released Note 2051342, which should solve this problem.
    Edit: it really did solve the problem.

  • Daily,weekly and monthly report format

    Hi,
    I was asked to create a format for daily, weekly, and monthly system performance reports. Can anyone send me a sample report format?

    Hi Sandy,
    System performance is always checked via a list of our lovely transactions in Basis. I created one for myself to keep track of performance.
    It goes like this:
    Daily: DB12 (check backup logs); SM51 (check servers are up); RZ20 (look for alerts); SM13; SM21; SM37 (check for cancelled jobs); SM12; ST22 (dumps); ST02; SM35; SP01 (spool jobs running for a long time); AL02; ST04; AL16.
    Weekly: DB12; DB02; SAPDBA checks; DB drives and space; EWA report of the past week.
    Monthly: more or less the same as weekly, with archive files as well... and SAPDBA cleanup logs... Monitor the DB growth and check the backup logs.
    Hopefully this info should help you in this regard.
    Please award suitable points for the post.
    Br,
    Sri

  • Dump Error

    Dump Msg
    With ABAP/4 Open SQL array select, the output table is too small.
    What happened?
    Error in ABAP application program.
    The current ABAP program "ZFLEXI" had to be terminated because one of the
    statements could not be executed.
    This is probably due to an error in the ABAP program.
    My logic is:
    *& Report  : ZDLYDSPHNEW                                               *
    *& Purpose : Daily Godown dispatches with condition valu details       *
    REPORT ZDLYDSPH NO STANDARD PAGE HEADING LINE-SIZE 255 LINE-COUNT 30(10)
    MESSAGE-ID SK .
    TYPE-POOLS: SLIS,ABAP.
    CONSTANTS: FORMNAME_TOP_OF_PAGE TYPE SLIS_FORMNAME VALUE 'TOP_OF_PAGE'.
    TABLES : VBRK,     "Billing document header detail
             VBRP,     "Billing document item details
             MAKT,     "Material Description
             BHDGD,
             MARA,     "Material Master
             KNA1,     "Customer Master Table
             TSPA,
            J_1IEXCHDR.
    FIELD-SYMBOLS: <DYN_TABLE> TYPE STANDARD TABLE,
                   <DYN_WA>,
                   <DYN_FIELD>.
    DATA: DY_TABLE TYPE REF TO DATA,
          DY_LINE  TYPE REF TO DATA,
          XFC TYPE LVC_S_FCAT,
          IFC TYPE LVC_T_FCAT,
          DYN_TABLE TYPE STANDARD TABLE OF KOMG WITH HEADER LINE.
    DATA: P_TABLE(30) TYPE C VALUE 'VBRK'.
    * Selection Screen Started
    SELECTION-SCREEN SKIP 1.
    SELECTION-SCREEN BEGIN OF BLOCK B_1 WITH FRAME TITLE TEXT-010.
    SELECT-OPTIONS : S_FKDAT FOR VBRK-FKDAT OBLIGATORY,
                     S_SPART FOR VBRK-SPART OBLIGATORY,
                     S_VBELN FOR VBRK-VBELN,
                     S_KUNAG FOR VBRK-KUNAG,
                     S_FKART FOR VBRK-FKART,
                     S_WERKS FOR VBRP-WERKS," obligatory,
                       S_VKBUR FOR VBRP-VKBUR,
                     S_VKORG FOR VBRK-VKORG OBLIGATORY,
                        S_VKGRP FOR VBRP-VKGRP, " sales group
                     S_RFBSK FOR VBRK-RFBSK OBLIGATORY DEFAULT 'C'.
    SELECTION-SCREEN END OF BLOCK B_1.
    SELECTION-SCREEN SKIP.
    SELECTION-SCREEN BEGIN OF BLOCK B_2 WITH FRAME TITLE TEXT-011.
    PARAMETERS : P_VARI LIKE DISVARIANT-VARIANT.
    SELECTION-SCREEN END OF BLOCK B_2.
    START-OF-SELECTION.
    PERFORM GET_FIELDS.
      PERFORM GET_STRUCTURE.
      PERFORM NEW_DYNAMIC_TABLE.
      PERFORM GET_OUTPUT_DATA.
      PERFORM OUTPUT_WRITE.
      PERFORM GENERATE_OUTPUT.
    *&---------------------------------------------------------------------*
    *&      Form  NEW_DYNAMIC_TABLE
    *&---------------------------------------------------------------------*
    FORM NEW_DYNAMIC_TABLE .
    * Create dynamic internal table and assign to FS
      CALL METHOD CL_ALV_TABLE_CREATE=>CREATE_DYNAMIC_TABLE
        EXPORTING
          IT_FIELDCATALOG = IFC
        IMPORTING
          EP_TABLE        = DY_TABLE.
      ASSIGN DY_TABLE->* TO <DYN_TABLE>.
    * Create dynamic work area and assign to FS
      CREATE DATA DY_LINE LIKE LINE OF <DYN_TABLE>.
      ASSIGN DY_LINE->* TO <DYN_WA>.
      PERFORM ASSIGN_VALUES_SELECTED.
    ENDFORM.                    " NEW_DYNAMIC_TABLE
    *&---------------------------------------------------------------------*
    *&      Form  GENERATE_OUTPUT
    *&---------------------------------------------------------------------*
    FORM GENERATE_OUTPUT .
    ENDFORM.                    " GENERATE_OUTPUT
    *&---------------------------------------------------------------------*
    *&      Form  GET_BILLING_HEADER
    *&---------------------------------------------------------------------*
    FORM GET_BILLING_HEADER .
    SELECT * INTO CORRESPONDING FIELDS OF TABLE I_VBRK
       FROM VBRK
        WHERE VBELN IN S_VBELN
          AND FKART IN S_FKART
          AND VKORG IN S_VKORG
          AND FKDAT IN S_FKDAT
          AND RFBSK IN S_RFBSK
          AND KUNAG IN S_KUNAG
          AND SPART IN S_SPART ORDER BY FKDAT.
    LOOP AT I_VBRK.
       ASSIGN I_VBRK TO <HEADER>.
      ASSIGN I_VBRK TO <DATA_TAB>.
       ASSIGN I_VBRK TO <F>.
       CASE TITEL-FELDNAME.
         WHEN OTHERS.
           READ TABLE G_T_TEXTS_ALL
         WITH KEY TABNAME   = TITEL-TABNAME
                  FIELDNAME = TITEL-FIELD
         INTO G_S_TEXT.
           DATEN-DATEN = <HEADER>.
           DATEN-DATEN = <DATAIN>.
           MOVE-CORRESPONDING <HEADER> TO DATEN.
           CLEAR DATEN-DATEN.
       if titel-tabname+0(5) = 'VBRK'.
           ASSIGN (TITEL-FELDNAME) TO <HEADER>.
           DATEN-DATEN = <HEADER>.
       ENDIF.
       ENDCASE.
       DATEN-ZEILE = ZEILE.
       APPEND DATEN.
    ENDLOOP.
    ENDFORM.                    " GET_BILLING_HEADER
    *&---------------------------------------------------------------------*
    *&      Form  GET_STRUCTURE
    *&---------------------------------------------------------------------*
    FORM GET_STRUCTURE .
      DATA : IDETAILS TYPE ABAP_COMPDESCR_TAB,
      XDETAILS TYPE ABAP_COMPDESCR.
      DATA : REF_TABLE_DES TYPE REF TO CL_ABAP_STRUCTDESCR.
    * Get the structure of the table.
      REF_TABLE_DES ?=
      CL_ABAP_TYPEDESCR=>DESCRIBE_BY_NAME( P_TABLE ).
      IDETAILS[] = REF_TABLE_DES->COMPONENTS.
      LOOP AT IDETAILS INTO XDETAILS.
        CLEAR XFC.
        XFC-FIELDNAME = XDETAILS-NAME .
        XFC-DATATYPE = XDETAILS-TYPE_KIND.
        XFC-INTTYPE = XDETAILS-TYPE_KIND.
        XFC-INTLEN = XDETAILS-LENGTH.
        XFC-DECIMALS = XDETAILS-DECIMALS.
        APPEND XFC TO IFC.
      ENDLOOP.
    ENDFORM.                    " GET_STRUCTURE
    *&---------------------------------------------------------------------*
    *&      Form  GET_OUTPUT_DATA
    *&---------------------------------------------------------------------*
    FORM GET_OUTPUT_DATA .
    * Select data from the table.
      SELECT * INTO TABLE <DYN_TABLE>
          FROM (P_TABLE)   " dynamic table name; P_TABLE = 'VBRK'
           WHERE VBELN IN S_VBELN
             AND FKART IN S_FKART
             AND VKORG IN S_VKORG
             AND FKDAT IN S_FKDAT
             AND RFBSK IN S_RFBSK
             AND KUNAG IN S_KUNAG
             AND SPART IN S_SPART ORDER BY FKDAT.
    ENDFORM.                    " GET_OUTPUT_DATA
    *&---------------------------------------------------------------------*
    *&      Form  OUTPUT_WRITE
    *&---------------------------------------------------------------------*
    FORM OUTPUT_WRITE .
      LOOP AT <DYN_TABLE> INTO <DYN_WA>.
        DO.
          ASSIGN COMPONENT SY-INDEX
          OF STRUCTURE <DYN_WA> TO <DYN_FIELD>.
          IF SY-SUBRC <> 0.   " leave the loop when no more components exist
            EXIT.
          ENDIF.
          IF SY-INDEX = 1.
            WRITE:/ <DYN_FIELD>.
          ELSE.
            WRITE: <DYN_FIELD>.
          ENDIF.
        ENDDO.
      ENDLOOP.
    ENDFORM.                    " OUTPUT_WRITE
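    A hedged guess at the cause of the "output table is too small" dump: the field catalog in GET_STRUCTURE is built from RTTI internal lengths, so the generated table can come out narrower than the database row, and the array SELECT then overflows it. A minimal sketch of a variant that sidesteps the field catalog entirely (assuming P_TABLE always names a transparent table, and a reasonably recent release for the dynamic CREATE DATA):

    * Sketch: type the dynamic table directly from the DDIC structure,
    * so its line type is guaranteed to match the database row.
    DATA dy_tab TYPE REF TO data.
    FIELD-SYMBOLS <tab> TYPE STANDARD TABLE.
    CREATE DATA dy_tab TYPE STANDARD TABLE OF (p_table).
    ASSIGN dy_tab->* TO <tab>.
    SELECT * FROM (p_table) INTO TABLE <tab>
      WHERE vbeln IN s_vbeln.   " remaining conditions as in the original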


  • RDDGENOL job getting cancelled with huge number of ABAP dumps

    Hi,
    In our production system we had applied the kernel up to patch level 263, and while
    applying SPAM 30 for release 640 it failed. We tried applying Basis patch 15, but it still
    did not go through.
    Since then we have been getting around 1500-2500 dumps daily. The job RDDGENOL is getting cancelled, and the dumps RAISE_EXCEPTION and SYNTAX_ERROR are displayed in ST22. Today there were around 2500 dumps in the system.
    When I now run the Database <-> ABAP Dictionary consistency check in
    DB02, I see a number of errors for missing objects in the database. There
    are dozens of primary indexes, secondary indexes, tables, and
    views listed in the result:
    Objects Missing in database No.
    =========================== ===
    Primary indexes 6
    Secondary indexes 14
    Tables 37
    Views 26
    When I try to create each of them using SE16 or using an SQL utility at OS
    level, it gives an SQL system error. We are also getting thousands of
    dumps each day (attached along with this message). I also see the CPF327E error
    in the OS-level job log.
    Also, we cannot transport any requests. There is some problem with or
    due to the conversions. Is it a problem with the kernel, the ABAP Dictionary, or
    the database?
    Following is the extract of the dumps due to DDIC in 000:
    ============================================================
    Runtime Error          SYNTAX_ERROR
    Date and Time         
    ShrtText
    Syntax error in program "CL_WB_CROSSREFERENCE==========CP ".
    What happened?
    Error in ABAP application program.
    The current ABAP program "SAPLSEWB" had to be terminated because one of the
    statements could not be executed.
    This is probably due to an error in the ABAP program.
    In program "CL_WB_CROSSREFERENCE==========CP ", the following syntax error
    occurred
    in the Include "CL_WB_CROSSREFERENCE==========CM00D " in line 8:
    "The type "KEYWORD_TAB" is unknown."
    ============================================================
    ============================================================
    Runtime Error          RAISE_EXCEPTION
    Date and Time         
    ShrtText
    Exception condition "DESTINATION_NOT_OPEN" raised.
    What happened?
    The current ABAP/4 program encountered an unexpected
    situation.
    What can you do?
    Print out the error message (using the "Print" function)
    and make a note of the actions and input that caused the
    error.
    To resolve the problem, contact your SAP system administrator.
    You can use transaction ST22 (ABAP Dump Analysis) to view and administer
    termination messages, especially those beyond their normal deletion
    date.
    This is especially useful if you want to keep a particular message.
    Error analysis
    A RAISE statement in the program "SAPLCRFC" raised the exception
    condition "DESTINATION_NOT_OPEN".
    Since the exception was not intercepted by a superior program
    in the hierarchy, processing was terminated.
    Short description of exception condition:
    Specified destination is not open.
    For detailed documentation of the exception condition, use
    Transaction SE37 (Function Library). You can take the called
    function module from the display of active calls.
    How to correct the error
    You may able to find an interim solution to the problem
    in the SAP note system. If you have access to the note system yourself,
    use the following search criteria:
    "RAISE_EXCEPTION" C
    "SAPLCRFC" or "LCRFCU20"
    "RFC_CONNECTION_CLOSE"
    or
    "SAPLCRFC" "DESTINATION_NOT_OPEN"
    or
    "RADBAT01 " "DESTINATION_NOT_OPEN"
    If you cannot solve the problem yourself and you wish to send
    an error message to SAP, include the following documents:
    1. A printout of the problem description (short dump)
    To obtain this, select in the current display "System->List->
    Save->Local File (unconverted)".
    2. A suitable printout of the system log
    To obtain this, call the system log through transaction SM21.
    Limit the time interval to 10 minutes before and 5 minutes
    after the short dump. In the display, then select the function
    "System->List->Save->Local File (unconverted)".
    3. If the programs are your own programs or modified SAP programs,
    supply the source code.
    To do this, select the Editor function "Further Utilities->
    Upload/Download->Download".
    4. Details regarding the conditions under which the error occurred
    or which actions and input led to the error.
    System environment
    SAP Release.............. "640"
    Application server....... "TXTPDSAP"
    Network address.......... "172.16.0.140"
    Operating system......... "OS400"
    Release.................. "5.3"
    Hardware type............ "0065000655EC"
    Character length......... 8 Bits
    Pointer length........... 64 Bits
    Work process number...... 35
    Short dump setting....... "full"
    Database server.......... "TXTPDSAP"
    Database type............ "DB400"
    Database name............ "TXT"
    Database owner........... "R3TXTDATA"
    Character set............ "en_US.ISO8859-1"
    SAP kernel............... "640"
    Created on............... "Dec 11 2008 23:06:45"
    Created in............... "AIX 1 5 00538A4A4C00 (IBM iSeries with OS400)"
    Database version......... "DB4_52"
    Patch level.............. "263"
    Patch text............... " "
    Supported environment....
    Database................. "V5R2, V5R3, V5R4, V6R1"
    SAP database version..... "640"
    Operating system......... "OS400 2 5, OS400 3 5, OS400 4 5, OS400 1 6"
    Memory usage.............
    Roll..................... 696832
    EM....................... 16759712
    Heap..................... 0
    Page..................... 32768
    MM Used.................. 1383120
    MM Free.................. 3483600
    SAP Release.............. "640"
    User and Transaction
    Client.............. 000
    User................ "DDIC"
    Language key........ "E"
    Transaction......... " "
    Program............. "SAPLCRFC"
    Screen.............. "SAPMSSY0 1000"
    Screen line......... 6
    Information on where terminated
    The termination occurred in the ABAP program "SAPLCRFC" in
    "RFC_CONNECTION_CLOSE".
    The main program was "RADBAT01 ".
    The termination occurred in line 22 of the source code of the (Include)
    program "LCRFCU20"
    of the source code of program "LCRFCU20" (when calling the editor 220).
    The program "SAPLCRFC" was started as a background job.
    Job name........ "RDDGEN0L"
    Job initiator... "DDIC"
    Job number...... 00032101
    ============================================================
    Kindly let me know the immediate fix. Thanks!
    Regards,
    Nick

    Hi Nick,
    I would say you have a slight misperception of this forum... it should not replace the Basis resources in your company.
    What you are doing is pretty complex (and the mixture of a SPAM update AND Basis SPs seems wrong to me), and therefore it will not be possible to help you based just on such a short dump.
    Sorry,
    Volker Gueldenpfennig, consolut international ag
    http://www.consolut.net - http://www.4soi.de - http://www.easymarketplace.de

  • Reading Short Dump in ST22

    Hi, is there any function module, or a way, to read the short dump generated in ST22 for a particular program?
    I have a Z program running in background on a daily basis. For error handling, when the program generates a short dump, I want to read that dump. I tried the FM /SDF/GET_DUMP_LOG; with it I am able to read the runtime error, exception, and error short text, but I am unable to read the entire log description,
    i.e. "what happened", "what can you do", etc., which can be viewed in ST22. Any inputs appreciated.
    Thanks & Regards,
    John.

    Hi,
    Try FM
    "STRUCTURE_DUMP"     -- Current contents of internal tables will be printed
    RS_SNAP_DUMP_DISPLAY
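    If the function modules don't give you the full text, the raw dump content is at least selectable from the dump table itself. A hedged sketch (table and field names as commonly seen on SNAP; treat them as assumptions, and note the key values are supplied as hypothetical parameters here):

    * Sketch: all lines of one dump sit in table SNAP under ascending SEQNO;
    * SNAP_BEG is just the SEQNO = '000' header entry.
    DATA lt_snap TYPE STANDARD TABLE OF snap.
    SELECT * FROM snap INTO TABLE lt_snap
      WHERE datum = p_datum
        AND uzeit = p_uzeit
        AND ahost = p_ahost
        AND uname = p_uname
      ORDER BY PRIMARY KEY.
    * each row's FLIST field holds an encoded chunk of the dump content;
    * decoding it into the ST22 display text is non-trivial and not shown.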
    Best regards,
    Prashant

  • Can I dump a .Mac account from Mail on  1 computer and not lose the data on

    I have 3 Macs: an eMac, an intel iMac, and a MacBook; all are using Mac Mail. Two users look at our single .Mac IMAP account daily; my son never uses it on his machine. Consequently he's accumulated thousands of messages, attachments, etc. on his machine that he'll never use. I want to dump the .Mac account from his machine, but I know that if I delete messages on one machine it will delete them from the server. Can I first delete the account on the intel iMac, then go to the appropriate mail folder on his machine and empty it of files to reduce the clutter, without affecting what's on the .Mac server and, subsequently, the messages on the other 2 machines?
    Thanks for your help

    If the messages are on the server then there are no messages on his machine. Thus, if you dump the account all the messages on the server will be permanently lost. An IMAP account leaves all messages on the server.

  • How to keep short dumps for a longer time

    Hi All,
    I need to write a program to fetch all of the fields mentioned below from the system dump tables and insert them into a user-defined table. Please help me with the code. Thanks in advance.
    PROBLEM DESCRIPTION
    ABAP runtime errors (also known as short dumps) are kept in the system for only two days, mainly because every short dump can use a lot of disk space, depending on the dump type. The problem is that this reduced time frame (two days) does not allow us to focus on solving the more frequent ones, and we do not have any statistics about:
        how many are we getting
        how often
        repetitive ones
    PROPOSED SOLUTION
    We need a program to be executed in a daily job right after midnight which will take the basic information of all today’s short-dumps and store it in a custom table.
    Information to be kept:
          DATE
          TIME
          HOST
          USER
          SYSTEM
          CLIENT
          ERROR ID
          TRANSACTION
          PROGRAM NAME
          PROGRAM NAME VERSION (TRANSPORT REQUEST NUMBER)
          PROGRAM NAME VERSION TIMESTAMP (transported to PPx)
          SCREEN
          SCREEN LINE
          MAIN PROGRAM NAME
          MAIN PROGRAM NAME VERSION (TRANSPORT REQUEST NUMBER)
          MAIN PROGRAM NAME VERSION TIMESTAMP (transported to PPx)
          PROGRAM LINE NUMBER AT TERMINATION
          SAP RELEASE
          SAP KERNEL
          PATCH LEVEL
          DATABASE VERSION
    The program must have archiving capabilities implemented. In the month-end run it will also perform the table housekeeping according to a parameter pair like this:
         ERROR-ID
         MONTHS TO KEEP
    We need to be able to define different retention periods for different kinds of errors; for example, our friend GETWA_TOO_MANY_SEGMENT could be stored for a year and a simple CONVT_NO_NUMBER only for a month. One of the parameters should be the default one, something like
            ERROR-ID **************
            MONTHS TO KEEP 01.

    Aruna,
    The following may be the action points for your requirements.
    1. Create a custom table with all the fields you require.
    2. Create an ABAP program to extract the data from the SAP standard table where the runtime error information is stored (a fuller sketch follows after this list).
    Like: select * from snap_beg into wa_snap_beg
              where seqno = '000'
              and   datum in s_datum
              and   uzeit in s_uzeit
              and   ahost in s_ahost
              and   uname in s_uname
              and   mandt in s_mandt.......
    So SNAP_BEG is a table where you can find dump information.
    After extracting the data from this table, insert it into the custom table you created in step 1.
    Use the INSERT statement for this.
    3. Create a transaction for this report.
    4. Schedule this program every day so that you get all the data into the custom table.
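    A minimal hedged sketch of steps 2 and 4 combined (ZDUMP_LOG is a hypothetical name for the custom table from step 1; adjust the field mapping to whatever you actually created):

    * Sketch: collect yesterday's dump headers into the custom table;
    * meant to run in a daily job right after midnight.
    REPORT zdump_collect.
    DATA: lt_snap TYPE STANDARD TABLE OF snap_beg,
          ls_snap TYPE snap_beg,
          ls_log  TYPE zdump_log,   " hypothetical custom table from step 1
          lv_date TYPE d.
    lv_date = sy-datum - 1.         " yesterday's dumps
    SELECT * FROM snap_beg INTO TABLE lt_snap
      WHERE seqno = '000'
        AND datum = lv_date.
    LOOP AT lt_snap INTO ls_snap.
      MOVE-CORRESPONDING ls_snap TO ls_log.
      INSERT zdump_log FROM ls_log.
    ENDLOOP.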
    I hope this will help you.
    Regards,
    Amey

  • Dump error while executing this unicode program.....

    Hi,
    I got a program for a 'Production Order Quantity Change Daily Checklist', but while executing it I got a dump from the following program, stating that:
    The current statement is only defined for character-type data objects.
    Error in the ABAP Application Program
    The current ABAP program "ZPROD" had to be terminated because it has
    come across a statement that unfortunately cannot be executed.
    For the statement
       "READ DATASET ... INTO f"
    only character-type data objects are supported at the argument position
    "f".
    In this case, the operand "f" has the non-character type "u". The
    current program is a Unicode program. In the Unicode context, type
    'X' fields or structures containing not only character-type components are
    regarded as non-character-type.
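    A hedged sketch of the usual workaround, assuming the file is delimited text: in a Unicode program, READ DATASET needs a character-type target, so read each line into a string first and split it, letting the assignment to WA's numeric components do the conversion:

    * Sketch only: would replace READ DATASET ... INTO wa inside the DO loop.
    DATA: lv_line TYPE string,
          lt_tok  TYPE TABLE OF string.
    READ DATASET w_dataset1 INTO lv_line.
    IF sy-subrc = 0.
      SPLIT lv_line AT cl_abap_char_utilities=>horizontal_tab
            INTO TABLE lt_tok.
    * then move the tokens into the components of WA one by one;
    * string-to-packed conversion happens on assignment.
    ENDIF.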
    Program:
    REPORT ZPPORDER LINE-SIZE 180 NO STANDARD PAGE HEADING
                    LINE-COUNT 058(001).
    TABLES: AUFK,  "Order master data
            AFKO,  "Order header data PP orders
            RESB,  "Reservation/dependent requirements
            MAST,  "Material to BOM Link
            STKO,  "BOM Header
            STPO.  "BOM item
    DATA: BEGIN OF WA,
             AUART      TYPE AUFK-AUART,
             AUFNR      TYPE AUFK-AUFNR,
             AEDAT      TYPE AUFK-AEDAT,
             AENAM      TYPE AUFK-AENAM,
             WERKS      TYPE AUFK-WERKS,
             PLNBEZ     TYPE AFKO-PLNBEZ,
             GAMNG      TYPE AFKO-GAMNG,
             GASMG      TYPE AFKO-GASMG,
             MATNR      TYPE RESB-MATNR,
             POSNR      TYPE RESB-POSNR,
             BDMNG      TYPE RESB-BDMNG,
             BMENG      TYPE STKO-BMENG,
             MENGE      TYPE STPO-MENGE,
          END OF WA,
          ITAB  LIKE SORTED   TABLE OF WA WITH NON-UNIQUE KEY AUFNR POSNR.
    DATA: BEGIN OF ITAB2 OCCURS 0.
          INCLUDE STRUCTURE WA.
    DATA: END OF ITAB2.
    DATA: BEGIN OF ITAB_AUFK OCCURS 0,
          AUART  LIKE AUFK-AUART,
          AUFNR  LIKE AUFK-AUFNR,
          POSNR  LIKE RESB-POSNR,
          AEDAT  LIKE AUFK-AEDAT,
          AENAM  LIKE AUFK-AENAM,
          WERKS  LIKE AUFK-WERKS,
          PLNBEZ LIKE AFKO-PLNBEZ,
          GAMNG(7) TYPE P DECIMALS 0,
          GASMG(7) TYPE P DECIMALS 0,
          MATNR  LIKE RESB-MATNR,
          BDMNG(7) TYPE P DECIMALS 0,
          BMENG(7) TYPE P DECIMALS 0,
          MENGE(7) TYPE P DECIMALS 3.
    DATA: END OF ITAB_AUFK.
    DATA: FDATE LIKE SY-DATUM,
          LDATE LIKE SY-DATUM.
    DATA: X_AUFNR LIKE AFKO-AUFNR,
          X_MENGE(7) TYPE P DECIMALS 0,
          X_ERR(3).
    DATA: W_DATASET1(500) VALUE '/usr/sap/trans/data/'.
    SELECT-OPTIONS T_WERKS  FOR  AUFK-WERKS OBLIGATORY.
    SELECT-OPTIONS T_AUFNR  FOR  AUFK-AUFNR.
    SELECT-OPTIONS T_AEDAT  FOR  AUFK-AEDAT.
    PARAMETERS     PDATA    LIKE W_DATASET1.
    CONCATENATE W_DATASET1 PDATA INTO W_DATASET1.
    PERFORM F_COLLECT_DATA.
    FORM F_COLLECT_DATA.
    OPEN DATASET W_DATASET1 FOR INPUT IN TEXT MODE encoding default.
      DO.
         IF sy-subrc <> 0.
            EXIT.
         ENDIF.
         READ DATASET W_DATASET1 INTO WA.
         APPEND WA TO ITAB2.
      ENDDO.
    CLOSE DATASET W_DATASET1.
    SELECT  A~AUFNR  A~AUART A~AEDAT A~AENAM A~WERKS
            B~PLNBEZ B~GAMNG B~GASMG
            C~MATNR  C~BDMNG C~POSNR
         INTO CORRESPONDING FIELDS OF TABLE ITAB
         FROM          ( AUFK AS A
              INNER JOIN AFKO AS B ON B~AUFNR  = A~AUFNR
              INNER JOIN RESB AS C ON C~AUFNR  = A~AUFNR )
         WHERE A~AEDAT IN T_AEDAT
           AND A~WERKS IN T_WERKS.
         LOOP AT ITAB INTO WA.
            CLEAR MAST.
            SELECT SINGLE * FROM MAST WHERE MATNR = WA-PLNBEZ
                                        AND WERKS = WA-WERKS.
            CLEAR STKO.
            SELECT SINGLE * FROM STKO WHERE STLNR = MAST-STLNR
                                        AND STLAL = MAST-STLAL.
            CLEAR STPO.
            SELECT SINGLE * FROM STPO WHERE STLNR = MAST-STLNR
                                        AND POSNR = WA-POSNR.
            WA-BMENG = STKO-BMENG.
            WA-MENGE = STPO-MENGE.
            MODIFY ITAB FROM WA.
            AT NEW AUFNR.
               SKIP.
            ENDAT.
            LOOP AT ITAB2 WHERE AUFNR = WA-AUFNR
                            AND POSNR = WA-POSNR.
                IF ITAB2-GAMNG <> WA-GAMNG OR
                   ITAB2-GASMG <> WA-GASMG OR
                   ITAB2-BDMNG <> WA-BDMNG.
                   CLEAR X_MENGE.
                   IF ITAB2-BMENG <> 0.
                      X_MENGE = ITAB2-GAMNG / ITAB2-BMENG * ITAB2-MENGE.
                   ENDIF.
                   CLEAR X_ERR.
                   IF ITAB2-BDMNG <> X_MENGE.
                      X_ERR = 'Err'.
                   ENDIF.
                   FORMAT COLOR COL_TOTAL.
                   WRITE: / ITAB2-AUART  UNDER 'Type',
                            ITAB2-AUFNR  UNDER 'Prod Order',
                            ITAB2-AEDAT  UNDER 'Last Chg Dt',
                            ITAB2-AENAM  UNDER 'Last Chg by',
                            ITAB2-WERKS  UNDER 'Plant',
                            ITAB2-PLNBEZ UNDER 'Material',
                       (10) ITAB2-GAMNG  UNDER 'Order Qty' DECIMALS 0,
                       (10) ITAB2-GASMG  UNDER 'Scrap Qty' DECIMALS 0,
                            ITAB2-POSNR  UNDER 'Item',
                            ITAB2-MATNR  UNDER 'Component Req',
                       (10) ITAB2-BDMNG  UNDER 'Req Qty' DECIMALS 0,
                            X_MENGE      UNDER 'BOM Qty' COLOR COL_TOTAL,
                            ITAB2-BMENG  UNDER 'BOM Base',
                            ITAB2-MENGE  UNDER 'BOM Comp',
                            X_ERR        UNDER 'Rmks' COLOR COL_TOTAL.
                   CLEAR X_MENGE.
                   IF WA-BMENG <> 0.
                      X_MENGE = WA-GAMNG / WA-BMENG * WA-MENGE.
                   ENDIF.
                   CLEAR X_ERR.
                   IF WA-BDMNG <> X_MENGE.
                      X_ERR = 'Err'.
                   ENDIF.
                   FORMAT COLOR OFF.
                   WRITE: / WA-AUART  UNDER 'Type',
                            WA-AUFNR  UNDER 'Prod Order',
                            WA-AEDAT  UNDER 'Last Chg Dt',
                            WA-AENAM  UNDER 'Last Chg by',
                            WA-WERKS  UNDER 'Plant',
                            WA-PLNBEZ UNDER 'Material',
                       (10) WA-GAMNG  UNDER 'Order Qty' DECIMALS 0,
                       (10) WA-GASMG  UNDER 'Scrap Qty' DECIMALS 0,
                            WA-POSNR  UNDER 'Item',
                            WA-MATNR  UNDER 'Component Req',
                       (10) WA-BDMNG  UNDER 'Req Qty' DECIMALS 0,
                            X_MENGE   UNDER 'BOM Qty' COLOR COL_TOTAL,
                            WA-BMENG  UNDER 'BOM Base',
                            WA-MENGE  UNDER 'BOM Comp',
                            X_ERR     UNDER 'Rmks' COLOR COL_TOTAL.
                 ENDIF.
            ENDLOOP.
         ENDLOOP.
       LOOP AT ITAB2.
            LOOP AT ITAB INTO WA WHERE AUFNR = ITAB2-AUFNR
                                   AND POSNR = ITAB2-POSNR.
                 DELETE ITAB2.
            ENDLOOP.
            SELECT SINGLE * FROM AUFK WHERE AUFNR = ITAB2-AUFNR.
            IF SY-SUBRC <> 0.
                 DELETE ITAB2.
            ENDIF.
       ENDLOOP.
       OPEN DATASET W_DATASET1 FOR OUTPUT IN TEXT MODE encoding default.
            LOOP AT ITAB  INTO WA.
               TRANSFER WA TO W_DATASET1.
            ENDLOOP.
            LOOP AT ITAB2 INTO WA.
               TRANSFER WA TO W_DATASET1.
            ENDLOOP.
       CLOSE DATASET W_DATASET1.
    ENDFORM.
    TOP-OF-PAGE.
        FORMAT COLOR COL_TOTAL.
        WRITE: / SY-DATUM, SY-UZEIT, SY-REPID, SY-UNAME,
             50 'Daily Qty Changed Checklist for Production Order',
            120 SY-PAGNO.
        SKIP.
        WRITE: / 'Plant ', T_WERKS-LOW.
        WRITE:   ' Last Change Date ', T_AEDAT-LOW, ' to ', T_AEDAT-HIGH.
        SKIP.
        WRITE: /1  'Type',
                6  'Prod Order',
                17 'Last Chg Dt',
                29 'Last Chg by',
                42 'Plant',
                49 'Material',
                69 'Order Qty',
                83 'Scrap Qty',
                99 'Item',
               105 'Component Req',
               121 'Req Qty',
               135 'BOM Qty',
               149 'BOM Base',
               163 'BOM Comp',
               178 'Rmks'.
         ULINE.
         WRITE: / 'Previous data :- '.
         FORMAT COLOR OFF.
         WRITE: / 'Current data :- '.
    INITIALIZATION.
       LDATE = SY-DATUM.
       LDATE  = LDATE - 1.
       FDATE = LDATE.
       MOVE:   FDATE         TO  T_AEDAT-LOW.
       APPEND T_AEDAT.
       PDATA = 'AE001'.
    So, please give me any suggestions on where to customize or rectify the error. This will be useful for me.
    Thanks in advance.
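
    For reference, one possible way to avoid this dump (a sketch under the assumption that the file layout may change; this is not the original poster's solution): do all file I/O through a purely character-type shadow of WA, so that READ DATASET and TRANSFER never see the packed quantity fields. The name WA_FILE and the CHAR lengths below are assumptions.

    DATA: BEGIN OF WA_FILE,
             AUART     TYPE AUFK-AUART,
             AUFNR     TYPE AUFK-AUFNR,
             AEDAT     TYPE AUFK-AEDAT,
             AENAM     TYPE AUFK-AENAM,
             WERKS     TYPE AUFK-WERKS,
             PLNBEZ    TYPE AFKO-PLNBEZ,
             GAMNG(17) TYPE C,        " character image of the packed AFKO-GAMNG
             GASMG(17) TYPE C,
             MATNR     TYPE RESB-MATNR,
             POSNR     TYPE RESB-POSNR,
             BDMNG(17) TYPE C,
             BMENG(17) TYPE C,
             MENGE(17) TYPE C,
          END OF WA_FILE.

    " Reading: note that SY-SUBRC is now checked directly after
    " READ DATASET, so the last record is not appended twice at
    " end of file, as it would be in the original DO loop.
    OPEN DATASET W_DATASET1 FOR INPUT IN TEXT MODE ENCODING DEFAULT.
    DO.
      READ DATASET W_DATASET1 INTO WA_FILE.
      IF SY-SUBRC <> 0.
        EXIT.
      ENDIF.
      MOVE-CORRESPONDING WA_FILE TO WA.  " CHAR -> P conversion
      APPEND WA TO ITAB2.
    ENDDO.
    CLOSE DATASET W_DATASET1.

    " Writing (the output part of F_COLLECT_DATA): convert the
    " other way round before each TRANSFER.
    OPEN DATASET W_DATASET1 FOR OUTPUT IN TEXT MODE ENCODING DEFAULT.
    LOOP AT ITAB INTO WA.
      MOVE-CORRESPONDING WA TO WA_FILE.  " P -> CHAR conversion
      TRANSFER WA_FILE TO W_DATASET1.
    ENDLOOP.
    CLOSE DATASET W_DATASET1.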

