Generalizing the data export script

Hi, I have a data export script which is a business rule. I can generalize the version, year, scenario and so on, but the problem is that the export file being created always has a fixed name — each time I run that rule, the data export file name is the same. Is it possible to somehow generalize that export file name as well?

Hi, here is some Java I have used earlier — try to modify it. It saves the file with the current date and time; I also used sed for all of this on UNIX.
JAVA
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.Writer;
import java.util.Calendar;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
* @author KVC
public class MigratorUtil {
     public static File getLatestFile(String dir){
          File directory = new File(dir);
          File choice = null;
          if(directory.isDirectory()){
               File[] files = directory.listFiles(new FileFilter() {
                    public boolean accept(File file) {
                         return file.isFile();
               long lastMod = Long.MIN_VALUE;
               int fileSize = files.length;
               for(int i=0;i<fileSize;i++){
                    File file = files;
                    if(file.lastModified() >lastMod){
                         choice = file;
                         lastMod = file.lastModified();
          }else{
               System.out.println(dir+" is not a directory");
          return choice;
     public static boolean processFile(File latestFile) throws FileNotFoundException,Exception{
          FileInputStream fileStream = new FileInputStream(latestFile);
          DataInputStream in = new DataInputStream(fileStream);
          BufferedReader br = new BufferedReader(new InputStreamReader(in));
          String strLine;
          int lineCount = 0;
          StringBuffer contents = new StringBuffer();
          while((strLine = br.readLine())!=null){
               if(lineCount == 0){ //first line
                    String header = System.getProperty("header");
                    if(header == null){
                         header = "HEADERHYPERION";
                    contents.append(header).append(getPreviousBusinessDate()).append(getDateFormat()).append("\n");
               }else{
                    contents.append(strLine).append("\n");
               lineCount++;
          //footer
          if(lineCount != 0){ //last line
               String footer = System.getProperty("header");
               if(footer == null){
                    footer = "TRAILER";
               contents.append(footer).append(lineCount-1); // linecount - 1 to remove the first line count
          // wtite the file
          String fileName = latestFile.getAbsoluteFile().toString();
          String outputFile = fileName.substring(0,fileName.indexOf("."))+".out";
          System.out.println(" output file is ..."+outputFile);
          File output = new File(outputFile);
          Writer oWriter = new BufferedWriter(new FileWriter(output));
          try{
               oWriter.write(contents.toString());
//               oWriter.write(getProcessedLine(contents.toString()));
          }finally{
               oWriter.close();
          return true;
     public static String getDateFormat(){
          Calendar calendar = Calendar.getInstance();
          int currentMonth = calendar.get(Calendar.MONTH);
          return calendar.get(Calendar.YEAR)+"-"+(currentMonth>9?""+currentMonth:"0"+currentMonth)+"-01";
     public static String getPreviousBusinessDate(){
          Calendar calendar = Calendar.getInstance();
          int currentMonth = calendar.get(Calendar.MONTH);
          calendar.set(Calendar.MONTH, currentMonth-1);
          int lastDate = calendar.getActualMaximum(Calendar.DATE);
          calendar.set(Calendar.DATE, lastDate);
          int lastDay = calendar.get(Calendar.DAY_OF_WEEK);
          if(lastDay == 1 ){
               lastDate = lastDate - 2; // for sunday
          }else if(lastDay == 7){
               lastDate = lastDate - 1; // for saturday
          return calendar.get(Calendar.YEAR)+"-"+(currentMonth>9?""+currentMonth:"0"+currentMonth)+"-"+lastDate;
     private static String getProcessedLine(String line){
          String seperator = System.getProperty("inputseperator");
          String out_seperator = System.getProperty("outputseperator");
          if(seperator == null){
               seperator = "!";
          if(out_seperator == null){
               out_seperator = "|";
          StringTokenizer tokenizer = new StringTokenizer(line,seperator);
          StringBuffer descContent = new StringBuffer();
          StringBuffer content = new StringBuffer();
          while(tokenizer.hasMoreTokens()){
               String element = tokenizer.nextToken();
               if(matchPattern(element)){
                    System.out.println("Criteria matched..."+element+ "So eat the next elemet");
                    descContent.append(tokenizer.nextElement()).append(out_seperator);
               }else{
                    content.append(element).append(out_seperator);
          content.append(descContent);
          String output = content.toString();
          return output.substring(0, output.length()-1);
     private static boolean matchPattern(String line){
          String regex = "\\d{1,2}.\\d{1,2}.\\d{1,2}";
          Pattern pattern = Pattern.compile(regex);
          Matcher m = pattern.matcher(line);
          return (m.matches());
     public static void main(String a[]){
          System.out.println(getPreviousBusinessDate());
SED
The sed/shell script for the file is:
#!/bin/bash
#Replace tab with pipe
cat $1 | sed 's/\t/|/g' > /tmp/test.out
line_cnt=`wc -l $1 | awk '{print expr $1-2}'`
if [ `uname -s` = 'SunOS' ]; then
set -A months 0 1 2 3 4 5 6 7 8 9 10 11 12
else
months=(0 1 2 3 4 5 6 7 8 9 10 11 12)
fi
Here it takes the last month's date; similarly, you can change it to suit your requirements.
YEAR="`date +%Y`"
MONTH="${months[`date +%m-1`]}"
TODAY_STR="`date +%Y`-${months[`date +%m`]}-01"
DAY="`cal $MONTH $YEAR | awk '{ if(NF>1) a=$NF ; if (NF==7) a=$6}END{print a}'`"
LAST_MNTH="`date +%Y`-${months[`date +%m-1`]}-$DAY"
cat /tmp/test.out | sed -e "s/HEADERHYPERION/HEADERHYPERION${LAST_MNTH}${TODAY_STR}/" > /tmp/test_tmp.out
cat /tmp/test_tmp.out | sed -e "s/TRAILER/TRAILER${line_cnt}/" > $2

Similar Messages

  • Create the Box and increase the height a/c to the data in Script

    hi,
    i need to create one box with 6 Columns in a Script and also it should be increase the size a/c to the Data. 
    please send me the code, this is very urgent,
    Thanks & regards,
    Sunil

    BOX, POSITION, & SIZE: These commands are used for drawing boxes and are used only during creating output.
    Syntax:
    /: BOX [Xpos]  [Ypos]  [Width]  [Height] [Frame]  [Intensive]
    X & Y – Upper left corner of the box.
    Width – Width of the box
    Ht – Height of the box
    Frame – Thickness of the box (Default is full black)
    Units used for Width, Height and Thickness are TW, PT, IN, CM, CH, LN.
    Ex., /: BOX WIDTH ‘20’ CM HEIGHT 1 IN
    FRAME 10 TW INTENSIFY 15.

  • Unable to execute data export calc script

    HI Experts,
    When I am trying to execute the data export calculation script I am getting the error like "Failed to establish the connection with SQL database".
    set dataexportoptions
    DataExportLevel "All";
    DataExport "DSN" "Test_DSN" "Test_table" "sa" "sa999";
    I have executed the calculation script by accessing the Essbase Administration Services through Citrix.
    Is there a way to export data from an Essbase cube to a SQL database through Citrix?

    First of all did you set the DSN up on the essbase server and not the EAS if it is different, if so did you test the DSN.
    Have you checked the essbase logs to see if they have more information.
    Cheers
    John
    http://john-goodwin.blogspot.com/

  • Essbase Data Export not Overwriting existing data file

    We have an ODI interface in our environment which is used to export the data from Essbase apps to text files using Data export calc scripts and then we load those text files in a relational database. Laetely we are seeing some issue where the Data Export calc script is not overwriting the file and is just appending the new data to the existing file.
    The OverWriteFile option is set to ON.
    SET DATAEXPORTOPTIONS {
         DataExportLevel "Level0";
         DataExportOverWriteFile ON;     
    DataExportDimHeader ON;
         DataExportColHeader "Period";
         DataExportDynamicCalc ON;
    The "Scenario" variable is a substitution variable which is set during the runtime. We are trying to extract "Budget" but the calc script is not clearing the "Actual" scenario from the text file which was the scenario that was extracted earlier. Its like after the execution of the calc script, the file contains both "Actual" and "Budget" data. We are not able to find the root cause as in why this might be happening and why OVERWRITEFILE command is not being taken into account by the data export calc script.
    We have also deleted the text data file to make sure there are no temporary files on the server or anything. But when we ran the data export directly from Essbase again, then again the file contained both "Actual" as well as "Budget" data which really strange. We have never encountered an issue like this before.
    Any suggestions regarding this issue?

    Did some more testing and pretty much zoomed on the issue. Our Scenario is actually something like this "Q1FCST-Budget", "Q2FCST-Budget" etc
    This is the reason why we need to use a member function because Calc Script reads "&ODI_SCENARIO" (which is set to Q2FCST-Budget) as a number and gives an error. To convert this value to a string we are using @member function. And, this seems the root cause of the issue. The ODI_Scenario variable is set to "Q2FCST-Budget", but when we run the script with this calculation function @member("&ODI_SCENARIO"), the data file brings back the values for "Q1FCST-Budget" out of nowhere in addition to "Q2FCST-Budget" data which we are trying to extract.
    Successful Test Case 1:
    1) Put Scenario "Q2FCST-Budget" in hard coded letters in Script and ran the script
    e.g "Q2FCST-Phased"
    2) Ran the Script
    3) Result Ok.Script overwrote the file with Q2FCST-Budget data
    Successful Case 2:
    1) Put scenario in @member function
    e.g. @member("Q2FCST-Budget")
    2) Results again ok
    Failed Case:
    1) Deleted the file
    2) Put scenario in a substitution variable and used the member function "@member("&ODI_Scenario") and Ran the script . *ODI_SCENARIO is set to Q@FCST-Budget in Essbase variables.
    e.g. @member("&ODI_SCENARIO")
    3) Result : Script contained both "Q1FCST-Budget" as well as "Q2FCST-Budget" data values in the text file.
    We are still not close to the root cause and why is this issue happening. Putting the sub var in the member function changes the complete picture and gives us inaccurate results.
    Any clues anyone?

  • Adding Extended Attributes for Data Exporter

    Hi, I'm having trouble in getting an exported attribute to export within the new Data Exporter feature. In fact, once i alter the export schema to include the additional column (ext att), it won't write to that table at all.
    I am doing the following per the documentation from Sun, hopefully someone can point out the error of my way.
    First I alter the IDM Schema Configuration.xml file to include the additional User extended attribute.
    Next, I alter the model-export.xml file under model name='User' within WS_HOME to include the additional attribute here, my entry is as follows:
    <field name='employeeId'
    type='java.lang.String'
    introduced='8.0'
    max-length='50'
    forensic='User'
    queryable='true'
    exported='true'
    friendlyName='Employee Id'>
    <description>The Peoplesoft ID coming over</description>
    </field>
    Next, I go to the unzipped directory of IDM hence, \exporter and execute: "ant rebuild" and "ant deploy".
    The following takes place, The rebuild process regenerates my create and drop schema configuration scripts for MySQL. I run both scripts and
    my new column with the newly added extended attribute appears in the EXT_USER table. I also issue the proper permission commands on the tables
    I then undeploy my web application from Tomcat and re-deploy the web app.
    I start the server, log into IDM and go to the Data Exporter configuration page. Under the User data type, my extended attribute does not show up. Further, what once worked, the scheduler now does not write to the EXT_USER table and gives the following error when I believe IDM is starting up. I have no doubt that this is a clue as to why it is not working:
    "StartupServlet: Defining properties from web.xml
    Starting: Identity Server...
    ...Finished starting Startup Servlet
    {Model=User, employeeId=[], assignedRoles=[], idmManager=, businessPhone=[], location=[], MemberObjectGroups=[(id=#ID#Top)], lhdis=true, lhlocked=false, controlledObjectGroupsRule=, ACCT_CD=[], lastModDate=Tue Oct 21 16:44:21 PDT 2008, failedPasswordLoginAttemptsCount=0, description=, lastModifier=Configurator, role=[], divisionCode=[], companyMobilePhoneNumber=[], fullname=Anuradha Rao, employeeType=[], CompanyMailingAddress=[], objectClass=[Top, Object, Principal, User], hasCapabilities=false, questionLocked=false, [email protected], subtype=, managerId=[], sponsorId=[], contractorLocation=[], jobCode=[], locked=false, failedQuestionLoginAttemptsCount=0, xmlSize=936, res=[], repoMod=Tue Oct 21 16:44:21 PDT 2008, accountId=, textPagerEmailAddress=[], lastname=Rao, lastAuditorScan=, user_resources=[], creator=Configurator, id=#ID#54D7-:3AE94AA9C11:110E5685:2FC0FC1B3DEDDBF6, title=[], idmManagerNameNotFound=, faxNumber=[], facility=[], dis=0, lastPasswordUpdate=Wed Oct 01 15:31:47 PDT 2008, name=definabl, authType=, effectiveDate=[], createDate=Thu Sep 25 10:56:13 PDT 2008, jobTitle=[], prov=2, accounts=[], ControlledObjectGroups=[], costCenter=[], firstname=Anuradha, correlationKey=, primaryObjectClass=User, departmentName=[], roleInfo=[], accountType=, middleInitial=[], departmentId=[], displayName=, cubeNumber=[], disabled=true, }
    ex: java.lang.reflect.InvocationTargetException"
    java.lang.reflect.InvocationTargetException is thrown which leads me to believe that something is wrong with the javabean that is regenerated under the ant rebuild and the underlying User.hbm.xml file that regenerated as well. I can see from the User.hbm file that the new extended attribute has been added but this is as far as I have gotten. I really don't know where to go from here.
    Any and all help is extremely appreciated.
    Thank you. Dan

    Hi Nik,
    From section 1.4.2 of the install guide.
    https://docs.oracle.com/middleware/1213/edq/DQINS/planning.htm#DQINS5205
    EDQ Configuration Directories
    EDQ requires two configuration directories, which are separate from the EDQ Home (installation) directory that contains the program files. The configuration directories are:
    The base configuration directory: This directory contains default configuration data. Once EDQ is installed, the files in the base configuration directory must not be altered, renamed, or moved.
    The local configuration directory: This directory contains overrides to the default configuration. EDQ looks for overrides in this directory first, before looking in the base configuration directory. Files in the local configuration directory can be modified to customize or extend EDQ.
    The names and locations of the configuration directories are as follows:
    If you are using Oracle WebLogic Server, the Oracle installation wizard automatically creates and populates the configuration directories in the EDQ domain with the names of oedq.home (base configuration directory) and oedq.local.home (local configuration directory). An example installation path is: WLS_HOME/user_projects/domains/edq_domain/edq/oedq.home
    WLS_HOME/user_projects/domains/edq_domain/edq/oedq.local.home
    If you are using Apache Tomcat, you create the configuration directories manually in any location, with any names, and the configuration utility will populate them. You are prompted to create the directories during the installation instructions.
    Just copy flags.xml from your oedq.home casemanagement folder to your oedq.local.home casemanagement folder and edit the file accordingly.
    thanks,
    Nick

  • Unable to stop incorrect date exports

    How do we set up a form in Adobe Acrobat XI that allows dates to be formatted a certain way (mmm/dd/yyyy) and exported in the same way to Excel and always be recognized as a "proper" date in Excel?
    Currently the following does not work (Attempt #1):
    Set up a field; Set the format category as date; Set up a Custom format of "mmm/dd/yyyy"
    Create a distribution file
    When users fill out the form if they type in an incorrect date, eg., "August 27 2013", the form automagically shows the date on the PDF as "Aug/27/13" - Great!
    When the users submit the form and it's brought into the response file the dates are shown in a default date format of mm/dd/yyyy - Fine, once the form owners understand this
    When the form owners export the information the data exported is the same as the original users entered it, not as it was automagically formatted to. For instance, if submitters originally entered "August 27, 2013" then that's what goes across to Excel. And some of these formats Excel doesn't know how to convert. - Understandably frustrating for form owners
    Attempt #2: As a workaround we set up special formatting that has a mask of "AAA/99/9999". This at least provides forces the users to use the same formatting, but is confusing the submitters when they need to enter dates from 1-9 and we've also found that the conversion of this format to a date in Excel doesn't work, but at least it's consistent! Javascript was also added to force users to use specific month abbreviations.
    d = new Date(event.value.replace(/-/g, " "));
    if (!Date.parse(d)){app.alert("Please enter in a proper date.\n\nThe month abbreviations are: Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec");}
    Attempt #3: The last attempt was to continue to use the set up as Attempt #1, but to also use the javascript from Attempt #2. The theory being that if a user entered in "August 27 2013" the javascript would complain. Alas, the javascript appears to run after Adobe automagically does its date format conversion.
    Does anyone know how to get around this or have any other ideas to either enforce a usable date format or have Adobe export the dates as they've been automatically formatted to? We've tried to find a way to turn off the automatic date conversion that Adobe's running, but haven't found a way yet. Another option seemed to be to allow a masking that allowed for optional characters (so that the "0" wouldn't be needed for the dates 1-9) but there doesn't seem to be one.
    Thanks in advance!

    Since there was no clear way to ensure that the date formatting was correct prior to exporting, we're going to get the respondants to use drop downs to ensure the formatting is correct. Not the most convenient for the users though as they're accustomed to being able to type in the values to select it (e.g., for the date of 23 they would expect to enter 2 then 3 for 23) based on other applications, but the Adobe pull downs don't "group" what's been entered (e.g., 2 then 3 will select 30, not 23) and so it will take them a bit to get used to it. I still can't believe that Adobe wouldn't simply export what it's been formatted to though... after all that's what we set the form up for.

  • Problems Generating Data Move Scripts in v1.5.1

    Hi, I'm having problems when trying to generate data move scripts in SQL Dev 1.5.1 to carry out an off-line data load. I'm carrying out a migration from Sybase to Oracle and the database I'm working on has over 400 tables in it. I have successfully captured and migrated all the tables into the resp. models and have generated and created the DDL for the converted model. However, when I request the data move scripts to be generated I'm only getting ctl files created for the 1st 49 tables. Also, there is no oracle_ctl.sh script created. Also, no post_load.sql script is produced only a pre_load.sql script.
    I've got 3 databases to migrate and on the 2nd database I only get the data move scripts created for ths first 86 tables and there are over 250 tables in it.
    It appears to have worked better for the 3rd database which is much much smaller than the 1st two databases having only 59 tables in it. This time all the files were produced as expected. However, it's really the 1st two larger databases that are my priority to get migrated.
    I've tried changing the preferences within Migration/Generation Options to from 'One Single File' to 'A File per Object' but it makes no difference. I would prefer everything in one file but can work round that.
    Ideally, I'd like to generate all the ctl files for a database in one go so that I can group edit them and would prefer the tool to create the oracle_ctl.sh script to call all the ctl scripts for me rather than having to hand build it. I'm puzzled as to why the tool only creates ctl files for some of the tables contained within a converted model. it looks like it is not completing the job in these cases as it also doesn't create all the scripts that it is supposed to create either. It doesn't give out any error messages and the screen looks no different at completion to when it works successfully in the case of the very small database.
    Anybody had this problem or can suggest how to fix it ?
    Thanks all.

    Send me you phone number to [email protected]
    We'll help sort this out.
    Barry

  • JRE1.5 swing.html parser fails to parse data between script tags

    Hi all...
    I've written a class that extends the java-provided default HTML parser to parse for text inside a table. In JRE1.4.* the parser works fine and extracts data between <script> tags as text. However now that I've upgraded to 1.5, the data between <script> tags are no longer parsed. Any suggestion anyone?
    Steve

    According to the API docs, the 1.5 parser supports HTML 3.2, for which the spec states that the content of SCRIPT and STYLE tags should not be rendered. I assume it doesn't have a scripting engine, so it won't get executed either.

  • Data export/import , different OS's

    HI,
    i have to export data from 10.2.0.1 database which is on Windows OS and import that data into a 9.2.0.6 database which is on IBM AIX Platform
    How can i perform the data export/import between different OS's?
    Kindly suggest
    Thanks
    SK

    You can use exp from your AIX machine DB, as it is 9i, it works as 9i client
    exp username/password@your_10gdb_ip_address/10gdb_instance_name file='your AIX path to store the dump' .......
    e.g: exp hr/[email protected]/orcl file='xxx' .........
    or if you have a tnsname entry configured for 10db in AIX 9i then use
    exp user/pass@tnsname file='xxxx'.....
    e.g: hr/hr@10gdb_on_win file='xxxxx' ........
    then import it from AIX by
    imp file='path of the dump reside in AIX' ..................
    Edited by: adnanKaysar on Mar 18, 2010 3:10 PM

  • Data Export Question

    Hi Experts,
    Is there a way to use the data export function to create a data extract file that won't put double quotes around the members?
    Thank You!

    If extracting to a flat file, I don't think so. If you extract to a relational source it does not include the quotes.

  • NAM data export using NDE

    Hi,
    Can some one help me in providing an example for all type of data collected by NAM using NDE export feature.
    As per NAM's guide, NAM export following type of data.
    Host
    Application
    Client Server Application
    Application conversation
    Network conversation
    RTP Metrics
    I just wanted to check all the data exported by NAM. An example for each type host,application etc.
    Thanks,
    Parul

    Hi,
    You can't exclude dimensions while taking a data export.Even if you don't specify a dimension in a fix statement then
    it will export all the blocks related to the missing dimension(level 0 and upper level blocks)
    @REMOVE will not remove the Dimension members from a FIX statement but it will remove the subset of the members fixed for a particular dimension.
    EG: FIX(@REMOVE(@RELATIVE("Entity",0),"E1","E2")) where E1 and E2 are again level 0 members which you are fixing in the FIX statement. Here E1, and E2 will not be considered for any calculation or in your case for the export.
    If you want to load the export from this cube to any other cube with less no. of dimensions than the current cube, you can use the load rules to load the data in the other cube by ignoring the columns corresponding to the dimension which are not there in the target cube.
    Thanks,
    CM

  • How to export out the date into the csv file?

    Hi, I had been trying to export out the value of the date to csv file.
    This is the  script:
    $strADPath = 'LDAP://dc=test,dc=com'
    function ConvertLargeIntegerToDate([object]$LargeInteger){
    try
    $int64 = ConvertLargeIntegerToInt64 ($LargeInteger)
    if ($int64 -gt 0)
    $retDate = [datetime]::fromfiletime($int64)
    else
    $retDate = $null
    return $retDate
    catch
    return $null
    $objSearch = New-Object DirectoryServices.DirectorySearcher
    $objSearch.Filter = '(&(objectClass=user)(samaccountname=user1))'
    $objSearch.SearchRoot = $strADPath
    $objSearch.PageSize = 1000
    $objSearch.SearchScope = "subtree"
    $objResults = $objSearch.Findall()
    $dateAccountExpires = ConvertLargeIntegerToDate $objUser.accountexpires[0]
    Write-Host "date Account expires: " $dateAccountexpires
    $objResults| ForEach-Object{
    $_.GetDirectoryEntry()
    } |
    Select-Object -Property @{Name="sAMaccountName";Expression={$_.sAMAccountName}},
    @{Name="cn";Expression={$_.cn}},
    @{Name="name";Expression={$_.name}},
    @{Name="manager";Expression={$_.manager}},
    @{Name="givenName";Expression={$_.givenName}},
    @{Name="accountExpires";Expression={$_.dateAccountExpires}},
    @{Name="department";Expression={$_.department}} |
    Export-Csv -Path 'D:\test44.csv'
    This is what I get in PowerShell ISE:
    This is what I had get for the csv file for the expire date:

    hi FWN,
    the code had giving me error saying that it could not call method on a null-value expression.
    $temp = $_.Properties
    the code had gave error saying that it could not call method on a null-value expression.
    $properties | %{ $res.$_ = $temp.Item($_) }
    the code had gave error saying that it could not call method on a null-value expression.
    with lot of thanks
    noobcy

  • Export the data using ODI

    I want to export the data using Odi from essbase v9 cubes. ODI and essbase server and in different servers.
    I am using the calculation script in the LKM part, but the exported files are in Essbase servers but ODI not able to recognize the exported files.
    Report script is taking too much of time to export the data therefore using the calculation script.
    Is this something related to the agent? However I am not able to create an agent to the essbase server. I am succesful in creating the agent for the local system where odi is installed.
    Please suggest.
    Regards,
    Murali

    Are you on 10.1.3.6.x? Then: http://john-goodwin.blogspot.com/2008/09/odi-series-part-2-agent.html
    Are you on 11g? Then: http://john-goodwin.blogspot.com/2010/12/managing-odi-11g-standalone-agents.html
    I will say with only a mild amount of shame and a large amount of gratitude that I installed both releases' agents through John's blog posts.
    Regards,
    Cameron Lackpour
    Edited by: CL on Jun 4, 2012 5:48 PM
    Whoops, had the same link in there twice.

  • Help required to manipulate the data without exporting the output twice

    I am running a script which export the data to CSV file and I re-import the same file to remove the duplicate rows and them export them.
    Need to clean the up the first file after the script execution
    Is it possible to store the data in an object without export and import for manipulating the data
    &{foreach($role in Get-VIRole){
        Get-VIPrivilege -Role $role -ErrorAction SilentlyContinue |
        Select @{N="Role";E={$role.Name}},@{N="Assigned Privileges";E={$_.ID}}
    }} | Export-Csv C:\Scripts\Permission.CSV -NoTypeInformation -UseCulture 
    &{foreach($row in (Import-Csv C:\Scripts\Permission.CSV -UseCulture)){   
            if($prevRole -and $row.Role -eq $prevRole){
                $role = ""
            Else{
                $role = $prevRole = $row.Role
            New-Object PSObject -Property @{
                Role = $role
               "Assigned privileges" = $row."Assigned Privileges"
        }} | Select "Role","Assigned Privileges" | Export-CSV

    If I understand the question correctly, I think this might work.
    $ht = @{}
    foreach($role in Get-VIRole){
    Get-VIPrivilege -Role $role -ErrorAction SilentlyContinue |
    foreach { $ht[$role.name] = $_.ID }
    $ht.GetEnumerator() |
    foreach {
    New-Object PSObject -Property @{Role=$_.name;'Assigned Privileges' = $_.value}
    } | export-csv
    That uses a hash table to de-dupe the role/assignment pairs and then builds the objects for export from the hash table.
    [string](0..33|%{[char][int](46+("686552495351636652556262185355647068516270555358646562655775 0645570").substring(($_*2),2))})-replace " "

  • How to export the data to excel in OAF Page - Urgent

    Hi All,
    I have a developed a page wich dispaly the records based on some condition.
    The Page has two regions.
    1.RowLayout Region
    2.Table Region
    Both the regions are getting the data from two diffrent VOs.
    In this case how to export the data to excel as the export button will not work if the page uses more than one VO(I think).
    Please help me.
    Thanks,
    Srinivas
    Edited by: SrinivasChanda on Oct 7, 2009 10:07 AM

    Hi Gaurav,
    Yes you are rite.When i tried exporting the data(which is coming from two different VOs altogether),i got only one region's data and then got error.
    Please note: Two regions(RowLayout,Table) are getting data from two different VOs
    below is one of the region(row layout) data:
    Hierarchy     Learning Certification Status     Learning Certification     Supervisor
    Direct Reports     Passed Only     All     Michael Swinnerton
    below is the error:               
    <div CLASS="errorText">               
    <html lang="en-US">               
    <head>               
    <script>               
    function ignoreWarnAboutChanges(url)               
    document.location.href = url;               
    </script>                
    <title>Error Page</title>               
    <link rel="stylesheet" charset="UTF-8" type="text/css" href="/OA_HTML/cabo/styles/blaf.css">               
    <META name="fwk-error" content="Error occured while processing the request">               
    <META name="fwk-error-detail" content="oracle.apps.fnd.framework.OAException: Application: FND     Message Name: FND_GENERIC_MESSAGE. Tokens: MESSAGE = java.io.IOException: Stream closed;           
    "     at oracle.apps.fnd.framework.OAException.wrapperException(OAException.java:891)"               
    "     at oracle.apps.fnd.framework.OAException.wrapperException(OAException.java:865)"               
    "     at OAErrorPage.jspService(OAErrorPage.jsp:34)"               
    "     at com.orionserver.http.OrionHttpJspPage.service(OrionHttpJspPage.java:56)"               
    "     at oracle.jsp.runtimev2.JspPageTable.service(JspPageTable.java:317)"               
    "     at oracle.jsp.runtimev2.JspServlet.internalService(JspServlet.java:465)"               
    "     at oracle.jsp.runtimev2.JspServlet.service(JspServlet.java:379)"               
    "     at javax.servlet.http.HttpServlet.service(HttpServlet.java:853)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.invoke(ServletRequestDispatcher.java:727)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.include(ServletRequestDispatcher.java:119)"               
    "     at com.evermind.server.http.EvermindPageContext.handlePageThrowable(EvermindPageContext.java:547)"               
    "     at com.evermind.server.http.EvermindPageContext.handlePageException(EvermindPageContext.java:518)"               
    "     at OAExport.jspService(OAExport.jsp:122)"               
    "     at com.orionserver.http.OrionHttpJspPage.service(OrionHttpJspPage.java:56)"               
    "     at oracle.jsp.runtimev2.JspPageTable.service(JspPageTable.java:317)"               
    "     at oracle.jsp.runtimev2.JspServlet.internalService(JspServlet.java:465)"               
    "     at oracle.jsp.runtimev2.JspServlet.service(JspServlet.java:379)"               
    "     at javax.servlet.http.HttpServlet.service(HttpServlet.java:853)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.invoke(ServletRequestDispatcher.java:727)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.forwardInternal(ServletRequestDispatcher.java:306)"               
    "     at com.evermind.server.http.HttpRequestHandler.processRequest(HttpRequestHandler.java:767)"               
    "     at com.evermind.server.http.HttpRequestHandler.run(HttpRequestHandler.java:259)"               
    "     at com.evermind.server.http.HttpRequestHandler.run(HttpRequestHandler.java:106)"               
    "     at EDU.oswego.cs.dl.util.concurrent.PooledExecutor$Worker.run(PooledExecutor.java:803)"               
    "     at java.lang.Thread.run(Thread.java:534)"               
    ## Detail 0 ##               
    java.io.IOException: Stream closed               
    "     at java.io.BufferedWriter.ensureOpen(BufferedWriter.java:98)"               
    "     at java.io.BufferedWriter.write(BufferedWriter.java:197)"               
    "     at OAExport.jspService(OAExport.jsp:107)"               
    "     at com.orionserver.http.OrionHttpJspPage.service(OrionHttpJspPage.java:56)"               
    "     at oracle.jsp.runtimev2.JspPageTable.service(JspPageTable.java:317)"               
    "     at oracle.jsp.runtimev2.JspServlet.internalService(JspServlet.java:465)"               
    "     at oracle.jsp.runtimev2.JspServlet.service(JspServlet.java:379)"               
    "     at javax.servlet.http.HttpServlet.service(HttpServlet.java:853)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.invoke(ServletRequestDispatcher.java:727)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.forwardInternal(ServletRequestDispatcher.java:306)"               
    "     at com.evermind.server.http.HttpRequestHandler.processRequest(HttpRequestHandler.java:767)"               
    "     at com.evermind.server.http.HttpRequestHandler.run(HttpRequestHandler.java:259)"               
    "     at com.evermind.server.http.HttpRequestHandler.run(HttpRequestHandler.java:106)"               
    "     at EDU.oswego.cs.dl.util.concurrent.PooledExecutor$Worker.run(PooledExecutor.java:803)"               
    "     at java.lang.Thread.run(Thread.java:534)"               
    java.io.IOException: Stream closed               
    "     at java.io.BufferedWriter.ensureOpen(BufferedWriter.java:98)"               
    "     at java.io.BufferedWriter.write(BufferedWriter.java:197)"               
    "     at OAExport.jspService(OAExport.jsp:107)"               
    "     at com.orionserver.http.OrionHttpJspPage.service(OrionHttpJspPage.java:56)"               
    "     at oracle.jsp.runtimev2.JspPageTable.service(JspPageTable.java:317)"               
    "     at oracle.jsp.runtimev2.JspServlet.internalService(JspServlet.java:465)"               
    "     at oracle.jsp.runtimev2.JspServlet.service(JspServlet.java:379)"               
    "     at javax.servlet.http.HttpServlet.service(HttpServlet.java:853)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.invoke(ServletRequestDispatcher.java:727)"               
    "     at com.evermind.server.http.ServletRequestDispatcher.forwardInternal(ServletRequestDispatcher.java:306)"               
    "     at com.evermind.server.http.HttpRequestHandler.processRequest(HttpRequestHandler.java:767)"               
    "     at com.evermind.server.http.HttpRequestHandler.run(HttpRequestHandler.java:259)"               
    "     at com.evermind.server.http.HttpRequestHandler.run(HttpRequestHandler.java:106)"               
    "     at EDU.oswego.cs.dl.util.concurrent.PooledExecutor$Worker.run(PooledExecutor.java:803)"               
    "     at java.lang.Thread.run(Thread.java:534)"               
    ">
    </head>
    <body>
    <table width=100%"" border=""0"" cellspacing=""0"" cellpadding=""0"">"               
    <tr> <td><img src="/OA_MEDIA/FNDSSCORP.gif" alt=""> </td></tr>               
    <tr> <td> </td>                
    <td> <a href= /OA_HTML/OALogout.jsp>Logout </a></td>                
    </tr>               
    <tr> <td width="100%" nowrap class="OraBGColorDark" >  </td> </tr>               
    </table>               
    <p>               
    <center>               
    <table width="95%" border="0" cellspacing="0" class="OraBGAccentDark" cellpadding="0">               
    <tr> <td>   </td> </tr>               
    <!-- <tr> <td class="OraErrorHeader"> <img src=/OA_HTML/cabo/images/errorl.gif> Error Page </td> </tr> -->               
    <tr> <td class="OraErrorHeader"> <center> Error Page </center> </td> </tr>               
    <tr> <td colspan=2 class="OraBGColorDark"> </td> </tr>               
    <tr> <td>   </td> </tr>                
    <tr> <td colspan=2 class="OraErrorText" >You have encountered an unexpected error. Please               
    contact the System Administrator for assistance. </td> </tr>                
    <tr> <td colspan=2 class="OraErrorText"> Click <a href=javascript:ignoreWarnAboutChanges("/OA_HTML/OAErrorDetailPage.jsp")> here </a> for exception details.                
    </td> </tr>               
    </table>               
    </center>               
    </div>               
    </body>               
    </html>
    Please help me to solve this issue.
    Thanks,
    Srinivas

Maybe you are looking for