Smooth filter

Hi,
I've built a simple smooth filter Codec, but I get strange colors near object edges. Could it be some kind of color bleeding, or is there a bug in my code? Perhaps in the conversion from and to byte?
Here is the code, thanks:
package camgrabtests;
import javax.media.format.*;
import java.awt.*;
import java.awt.event.*;
import javax.media.*;
import java.util.*;
// control from above (top-level controller)
public class ResizeBlurEffect implements Effect, ControllerListener {

    private Format[] inputFormats;
    private Format[] outputFormats;
    private Format inputFormat;
    private Dimension size;
    private Format outputFormat;
    protected int width;
    protected int height;
    protected int center_x;
    protected int center_y;
    private int xBlurFactor;
    private int yBlurFactor;

    // return the first format in outs that matches in, or null if none does
    Format matches(Format in, Format outs[]) {
        for (int i = 0; i < outs.length; i++) {
            if (in.matches(outs[i]))
                return outs[i];
        }
        return null;
    }
    ResizeBlurEffect(int width, int height, int xBlurFactor, int yBlurFactor) {
        this.width = width;
        this.height = height;
        this.xBlurFactor = xBlurFactor;
        this.yBlurFactor = yBlurFactor;
        // default target formats
        inputFormats = new Format[] {
            new RGBFormat(null,
                          Format.NOT_SPECIFIED,
                          Format.byteArray,
                          Format.NOT_SPECIFIED,
                          24,
                          3, 2, 1,
                          3, Format.NOT_SPECIFIED,
                          Format.TRUE,
                          Format.NOT_SPECIFIED)
        };
        outputFormats = new Format[] {
            new RGBFormat(new Dimension(width, height),
                          width * height * 3,
                          Format.byteArray,
                          Format.NOT_SPECIFIED,
                          24,
                          3, 2, 1,
                          3, Format.NOT_SPECIFIED,
                          Format.TRUE,
                          Format.NOT_SPECIFIED)
        };
    }
    // methods for interface Codec
    public Format[] getSupportedInputFormats() {
        return inputFormats;
    }

    public Format[] getSupportedOutputFormats(Format input) {
        if (input == null) {
            return outputFormats;
        }
        if (matches(input, inputFormats) != null) {
            return new Format[] { outputFormats[0].intersects(input) };
        } else {
            return new Format[0];
        }
    }

    public Format setInputFormat(Format input) {
        inputFormat = input;
        return input;
    }

    public Format setOutputFormat(Format output) {
        if (output == null || matches(output, outputFormats) == null)
            return null;
        RGBFormat incoming = (RGBFormat) output;
        size = incoming.getSize();
        if (size == null)
            return null;
        // force the negotiated size to the requested output size
        size.width = width;
        size.height = height;
        //int maxDataLength = incoming.getMaxDataLength();
        int lineStride = incoming.getLineStride();
        float frameRate = incoming.getFrameRate();
        int flipped = incoming.getFlipped();
        int endian = incoming.getEndian();
        int pixelStride = incoming.getPixelStride();
        int maxDataLength = height * width * 3;
        lineStride = width * 3;
        outputFormat = outputFormats[0].intersects(new RGBFormat(size,
                maxDataLength,
                incoming.getDataType(),
                frameRate,
                Format.NOT_SPECIFIED,
                Format.NOT_SPECIFIED,
                Format.NOT_SPECIFIED,
                Format.NOT_SPECIFIED,
                pixelStride,
                lineStride,
                flipped,
                endian));
        System.out.println("final outputformat = " + outputFormat);
        return outputFormat;
    }
    // method where the stream processing happens
    public int process(Buffer inBuffer, Buffer outBuffer) {
        System.out.println("process in: " + inBuffer.getFormat());
        int outputDataLength = ((VideoFormat) outputFormat).getMaxDataLength();
        validateByteArraySize(outBuffer, outputDataLength);
        System.out.println("process out: " + outBuffer.getFormat());
        outBuffer.setLength(outputDataLength);
        outBuffer.setFormat(outputFormat);
        outBuffer.setFlags(inBuffer.getFlags());

        byte[] inData = (byte[]) inBuffer.getData();
        byte[] outData = (byte[]) outBuffer.getData();
        RGBFormat vfIn = (RGBFormat) inBuffer.getFormat();
        Dimension sizeIn = vfIn.getSize();
        final int pixStrideIn = vfIn.getPixelStride();
        final int lineStrideIn = vfIn.getLineStride();
        int iw = sizeIn.width;
        int ih = sizeIn.height;

        // box-filter window: the downscale ratio plus the requested extra blur
        final int filterWidth = Math.max(iw / width, 1) + xBlurFactor;
        final int filterHeight = Math.max(ih / height, 1) + yBlurFactor;

        for (int y = 0, counter = 0; y < height; y++) {
            for (int x = 0; x < width; x++, counter += 3) {
                // window bounds around (x, y), clamped to the input image
                final int startY = Math.min(Math.max(y - filterHeight / 2, 0), ih);
                final int endY   = Math.min(Math.max(y + Math.max(filterHeight / 2, 1), 0), ih);
                final int startX = Math.min(Math.max(x - filterWidth / 2, 0), iw);
                final int endX   = Math.min(Math.max(x + Math.max(filterWidth / 2, 1), 0), iw);

                if (outData.length < filterWidth * filterHeight * pixStrideIn) {
                    System.out.println("the buffer is not full");
                    return BUFFER_PROCESSED_FAILED;
                }

                int index = startY * lineStrideIn + startX * pixStrideIn;
                final int actualLength = endX - startX;
                final int step = actualLength * pixStrideIn;

                // accumulate the three channels over the window
                float acc1 = 0, acc2 = 0, acc3 = 0;
                for (int i = startY; i < endY; i++) {
                    for (int j = 0; j < step; j += 3) {
                        if (x == width / 2 && y == height / 2)
                            System.out.println("partial red = " + inData[index + j]);
                        acc1 += inData[index + j];
                        acc2 += inData[index + j + 1];
                        acc3 += inData[index + j + 2];
                    }
                    index += lineStrideIn;
                }
                float area = actualLength * (endY - startY);
                outData[counter] = (byte) (acc1 / area);
                if (x == width / 2 && y == height / 2)
                    System.out.println("mean red = " + outData[counter]);
                outData[counter + 1] = (byte) (acc2 / area);
                outData[counter + 2] = (byte) (acc3 / area);
            }
        }
        return BUFFER_PROCESSED_OK;
    }
    public String getName() {
        return "Resize Blur Effect";
    }

    public void open() {
    }

    public void close() {
    }

    public void reset() {
    }

    // methods for interface javax.media.Controls
    public Object getControl(String controlType) {
        return null;
    }

    public Object[] getControls() {
        return null;
    }

    // make sure the output buffer holds a byte[] of at least newSize bytes
    byte[] validateByteArraySize(Buffer buffer, int newSize) {
        Object objectArray = buffer.getData();
        byte[] typedArray;
        if (objectArray instanceof byte[]) {      // is correct type AND not null
            typedArray = (byte[]) objectArray;
            if (typedArray.length >= newSize) {   // has sufficient capacity
                return typedArray;
            }
            byte[] tempArray = new byte[newSize]; // re-alloc array
            System.arraycopy(typedArray, 0, tempArray, 0, typedArray.length);
            typedArray = tempArray;
        } else {
            typedArray = new byte[newSize];
        }
        buffer.setData(typedArray);
        return typedArray;
    }

    // implements ControllerListener
    public void controllerUpdate(ControllerEvent ce) {
        System.out.println(ce);
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public void setRescaleBlurFactors(int width, int height, int xBlurFactor, int yBlurFactor) {
        this.width = width;
        this.height = height;
        this.xBlurFactor = xBlurFactor;
        this.yBlurFactor = yBlurFactor;
    }

    // quick check: a byte value above 127 is read back as a negative number, so sums drift negative
    public static void main(String[] args) {
        int a = 0;
        byte b = -20; // this is the pixel value 236 seen as a signed byte
        for (int i = 0; i < 100; i++) {
            a += b;
        }
        System.out.println("a: " + a);
    }

    public int getXBlurFactor() {
        return xBlurFactor;
    }

    public void setXBlurFactor(int xBlurFactor) {
        this.xBlurFactor = xBlurFactor;
    }

    public int getYBlurFactor() {
        return yBlurFactor;
    }

    public void setYBlurFactor(int yBlurFactor) {
        this.yBlurFactor = yBlurFactor;
    }
}
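
For completeness, here is a rough sketch of how an Effect like this gets plugged into a Processor's video track. The capture locator, the 160x120 size and the blur factors are just placeholder values, and the waits for the Configured/Realized states are only indicated by comments, so treat it as an outline rather than working setup code:

package camgrabtests; // same package, since the ResizeBlurEffect constructor is package-private

import javax.media.*;
import javax.media.control.TrackControl;
import javax.media.format.VideoFormat;

public class EffectChainSketch {
    public static void setUpProcessor() throws Exception {
        Processor p = Manager.createProcessor(new MediaLocator("vfw://0")); // placeholder source
        p.configure();
        // ... block here until p reaches Processor.Configured (e.g. via a ControllerListener)

        TrackControl[] tracks = p.getTrackControls();
        for (int i = 0; i < tracks.length; i++) {
            if (tracks[i].getFormat() instanceof VideoFormat) {
                // insert the smoothing/resizing effect into this track's codec chain
                tracks[i].setCodecChain(new Codec[] { new ResizeBlurEffect(160, 120, 1, 1) });
            }
        }

        p.realize();
        // ... block here until p reaches Processor.Realized, then:
        p.start();
    }
}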

There was a problem in the conversion from byte to int for this RGB format.
I added these two functions and now it works:
    // cast the accumulated mean back to a byte for the output buffer
    private final byte convert(float x) {
        return (byte) x;
    }

    // read a byte as an unsigned value in 0..255
    private final int deconvert(byte x) {
        return x & 0xff;
    }
and changed the averaging loop like this:

    // inside the process method
    for (int i = startY; i < endY; i++) {
        for (int j = 0; j < step; j += 3) {
            // accumulate unsigned channel values
            acc1 += deconvert(inData[index + j]);
            acc2 += deconvert(inData[index + j + 1]);
            acc3 += deconvert(inData[index + j + 2]);
        }
        index += lineStrideIn;
    }
    float area = actualLength * (endY - startY);
    outData[counter] = convert(acc1 / area);
    outData[counter + 1] = convert(acc2 / area);
    outData[counter + 2] = convert(acc3 / area);
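
For anyone who runs into the same artifact: the strange edge colors come from Java's signed bytes. Channel values above 127 are read back as negative numbers, which drags the average to the wrong side exactly where bright and dark pixels meet. This tiny standalone check (illustration only, the class name is made up) shows the difference the & 0xff mask makes:

// Averaging two bright channel values with and without the & 0xff mask.
public class ByteAverageCheck {
    public static void main(String[] args) {
        byte a = (byte) 250;  // bright sample, stored as -6
        byte b = (byte) 240;  // bright sample, stored as -16
        float signedMean = (a + b) / 2f;                      // -11.0, wrong
        float unsignedMean = ((a & 0xff) + (b & 0xff)) / 2f;  // 245.0, right
        System.out.println("signed mean   = " + signedMean);
        System.out.println("unsigned mean = " + unsignedMean);
        System.out.println("written back  = " + ((byte) unsignedMean & 0xff)); // 245 again
    }
}

The convert()/deconvert() pair above does exactly this: it applies the mask when reading pixels in and uses a plain cast when writing the mean back out.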
