; NAME: ; photon1_reduce.pro (version 3.8) ; ; PURPOSE: ; This program is used to reduce data taken by the photonxfer_test ; procedure. ; ; CALLING SEQUENCE: ; photon1_reduce testdir, [snr_min=snr_min, sigclip=z-score, rejectcyc=rejectcyc, ; outps=outps, infile=infile, outfile=outfile, ; outdir=outdir, noref=noref, ma_refcor=ma_refcor] ; ; INPUTS: ; testdir - The directory containing the photon transfer images and a ; file containing a list of their file names. If the infile ; keyword is not set, a file name of flistBJR.lst is assumed. ; ; ; KEYWORD PARAMETERS: ; snr_min - minimum signal to "readout noise" ratio for fitting (this ; is defined in a funny way; can be set to a very low value ; (i.e. 1e-8) to force fitting using all data points). This ; parameter is not used in this script, it is just passed on ; as a parameter to the call to photon_analysis. ; sigclip - z-score for outlier removal ; rejectcyc - number of rejection cycles for outlier removal ; outps - redirect output to postscript file ; region - rectangular area of array to use for reduction. Specify as ; 4-element int array [x1,x2,y1,y2] to process columns x1 to ; x2 in rows y1 to y2. ; infile - Name of file containing file names of input images. ; Default is 'flistBJR.lst' ; outfile - Name of output data file to create. Default is constructed ; from filters used for the experiment. ; outdir - Directory where output files are written. ; Default = testdir+'Results' ; noref - Set this keyword to disable reference pixel correction ; ma_refcor - Set this keyword if maximal-averaging reference pixel ; correction is desired. ; colinc - column increment in analysis region ; rowinc - row increment in analysis region ; ; NOTE: Default behavior is to do spatial average reference pixel ; correction. If both ma_refcor and noref are set, the noref keyword ; over-rides the ma_refcor keyword and no reference pixel correction will ; be done. 
; ; EXAMPLE ; Perform photon transfer reduction and create postscript file in default 'Results' subdirectory ; IDL>photon1_reduce, pxdir, snr_min=1e-8, region=[257,1798,256,1791] ; ; REFERENCE: ; ; MODIFICATION HISTORY: ; Written by: ; Michael Telewicz, IDTL, May 20, 2002 ; Modified: ; Michael Telewicz, IDTL, May 27, 2002 ; Added header to script ; Bernie Rauscher, IDTL, August 8, 2002 ; Updated links to refsub and mkcds. Fixed bugs on unix platform. ; Sungsoo Kim, September 19, 2002 ; Changed procedure name from photonxfer to photon. ; Now considers all data cubes with the same exposure time at ; once (not as a pair). ; Removed analysis 'regions'. Now considers only one analysis ; area defined by xarea and yarea. ; Added error bars to data points and fitting results. ; Linear fitting (instead of the non-linear fitting) is ; implemented in order to measure error bars of the fit ; parameters more easily. ; Sungsoo Kim, October 7, 2002 ; Added SNR criterion. ; Ernie Morse, IDTL, February 6, 2003 (version 2.1) ; Added keyword to allow user to specify analysis region ; Ernie Morse, IDTL, March, 2003 (version 3.0) ; Split analysis portion into separate procedure ; Added sa_refcor, ma_refcor, outfile, outdir keywords ; Ernie Morse, IDTL, March 26, 2003 (version 3.1) ; Made modification to subtract bias frame if available and ; write out a line explaining whether or not this subtraction was ; done to the output text file ; Made change to error calculation--only dividing sigma by size of ; analysis region, not by number of elements in multi-frame cube. ; Ernie Morse & Don Figer, IDTL, April 9, 2003 ; Made modifications to allow for processing single-pixel data ; Mike Regan, IDTL, May 5, 2003 ; Removed first read at each exp time from the calculation. It ; seems to be too low at higher flux levels. Also, fixed ; uncertainty calculation. Removed all zero std dev pixels from ; the calculation. ; Ernie Morse, IDTL, May 6, 2003 (v3.4) ; Corrected rejection of zero std. dev.
pixels--it didn't work if ; reference pixel correction was performed. ; Ernie Morse, IDTL, May 9, 2003 (v3.5) ; Eliminated sa_refcor keyword--spatial average reference pixel ; correction is now default ; Added noref keyword--can be set to explicitly disable reference ; pixel correction ; Made changes in code to reflect new default case ; (spatial averaging). ; Changed default output directory to testdir + 'Results' ; Removed assumption that input directory names don't end with ; the path delimiter--it is now added at the ; beginning if not present on input. ; Ernie Morse, IDTL, January 16, 2004 (v3.6) ; Changed process by which images are grouped by exposure time ; in order to make it more efficient and easier to understand. ; Added comments and brought code into compliance with IDTL ; coding standards. ; Changed name to photon1_reduce to be consistent with naming ; convention established for IDTL scripts. ; Ernie Morse, IDTL, January 20, 2004 (v3.7) ; Removed references to fitonly and debug keywords, as neither ; is needed any longer. 
; If re-plotting of old data is required,
;           it can be accomplished by calling photon_analysis directly
;       Ernie Morse, IDTL, July 20, 2004 (v3.8)
;           Added rowinc and colinc keywords to allow specification of
;           non-contiguous rows and columns in the analysis region

; Reduce photon-transfer data: group CDS FITS images by exposure time,
; bias-subtract and (optionally) reference-pixel correct them, compute
; per-pixel mean/stddev statistics for each group, write a data file, and
; hand it to photon_analysis for fitting/plotting.
;
; NOTE(review): the noref keyword was documented in the file header and
; tested below via keyword_set(noref), but it was missing from the
; procedure declaration, so callers could never actually disable reference
; pixel correction.  It is now declared.  The obsolete sa_refcor keyword
; (eliminated in v3.5 per the modification history) is retained, but
; ignored, for backward compatibility with existing callers.
pro photon1_reduce, testdir, snr_min=snr_min, sigclip=sigclip, $
    rejectcyc=rejectcyc, outps=outps, region=region, infile=infile, $
    outfile=outfile, outdir=outdir, sa_refcor=sa_refcor, $
    ma_refcor=ma_refcor, noref=noref, rowinc = rowinc, colinc = colinc

    ; check to see if input directory name ends in path delimiter--if not,
    ; add it to the end
    if (strmid(testdir, strlen(testdir)-1, 1) ne path_sep()) then begin
        testdir = testdir + path_sep()
    endif

    ; set outdir to testdir + 'Results' if not set by user
    if (not keyword_set(outdir)) then begin
        outdir = testdir + 'Results' + path_sep()
    endif

    ; check to see if output directory name ends in path delimiter--if not,
    ; add it to the end
    if (strmid(outdir, strlen(outdir)-1, 1) ne path_sep()) then begin
        outdir = outdir + path_sep()
    endif

    ; set default input file name and analysis parameters
    if (not keyword_set(infile)) then infile = 'flistBJR.lst'
    if (not keyword_set(snr_min)) then snr_min = 0.
    if (not keyword_set(sigclip)) then sigclip = 5
    if (not keyword_set(rejectcyc)) then rejectcyc = 3
    if (not keyword_set(outps)) then outps = 0
    if (not keyword_set(rowinc)) then rowinc = 1
    if (not keyword_set(colinc)) then colinc = 1

    ; Get SCA info by reading the header of the 1st image listed in the
    ; input file.
    openr, lun1, testdir+infile, /get_lun
    tempfile = ''
    readf, lun1, tempfile
    ; free_lun (not close) releases the LUN allocated by /get_lun; the
    ; original "close, lun1" left the unit number allocated (LUN leak)
    free_lun, lun1
    fits_read, testdir + tempfile, 0, header, /header_only

    ; read detector name from header and extract detector type from the
    ; name.  Detector type is assumed to be everything up to the first '-'
    ; character in the detector name.
    SCAName = sxpar(header, 'DETNAME')
    temp = str_sep(SCAName, '-')
    SCAType = temp[0]

    ; might as well get image size parameters while we're at it
    xsize = sxpar(header, 'naxis1')
    ysize = sxpar(header, 'naxis2')

    ; Determine default analysis regions based on detector type (unless a
    ; region was provided by the user through the region keyword).
    if (NOT (keyword_set(region))) then begin
        case scaType of
            'H1RG' : begin
                xarea = [4, 1019]
                yarea = [4, 1019]
            end
            'H2RG' : begin
                xarea = [4, 2043]
                yarea = [4, 2043]
            end
            'SB304' : begin
                xarea = [4, 2051]
                yarea = [0, 2047]
            end
            else : begin
                print, 'Detector type ' + scaType + $
                    ' not supported. Exiting procedure.'
                return
            end
        endcase
    endif else begin
        xarea = [region[0], region[1]]
        yarea = [region[2], region[3]]
    endelse

    ; find number of column (x) and row (y) pixels in the analysis area
    nxarea = ceil((xarea[1] - xarea[0] + 1) / float(colinc))
    nyarea = ceil((yarea[1] - yarea[0] + 1) / float(rowinc))

    ; read in all of the FITS file names from the input file
    readcol, testdir + infile, files, format='(A)'

    ; define an array of name-exposure time-Julian date tuples.  When
    ; exposure times are sorted and grouped later, this will automatically
    ; keep the correct file names associated with the exposure times.
    imginfo = replicate({name:'', exp_time:0.0, jdate:0.0d}, $
        n_elements(files))

    ; put the file names into the tuple array
    imginfo.name = files

    for filecount = 0, n_elements(files) - 1 do begin

        ; get the exposure times and Julian dates by reading the headers
        ; of each of the FITS files
        fits_read, testdir + files[filecount], 0, header, /header_only
        imginfo[filecount].exp_time = sxpar(header, 'exp_time')

        ; get time and date from header.  IDTL FITS headers are
        ; non-standard, so the gettime and getdate functions must be used
        ; instead of sxpar.
        getdate, header, theDate
        gettime, header, theTime

        ; divide the date and time strings into component parts
        ; (assumes MM/DD/YYYY and HH:MM:SS -- TODO confirm against headers)
        dateparts = strsplit(theDate, '/', /extract)
        timeparts = strsplit(theTime, ':', /extract)

        ; assign the date and time components to numeric variables
        month  = fix(dateparts[0])
        day    = fix(dateparts[1])
        year   = fix(dateparts[2])
        hour   = fix(timeparts[0])
        minute = fix(timeparts[1])
        second = fix(timeparts[2])

        ; construct the Julian date and assign to tuple for current file
        imginfo[filecount].jdate = $
            julday(month, day, year, hour, minute, second)
    endfor

    ; sort the tuples by increasing exposure time
    imginfo = imginfo[sort(imginfo.exp_time)]

    ; The uniq function will find the last array index of each unique
    ; exposure time value.
    groupends = uniq(imginfo.exp_time)

    ; The timevals array will hold the exposure times of each group
    timevals = (imginfo.exp_time)[groupends]

    ; numtimes is the number of unique exposure times
    numtimes = n_elements(timevals)

    ; num_per_time is the number of images with each exposure time
    num_per_time = intarr(numtimes)
    num_per_time[0] = groupends[0] + 1
    ; guard added: with a single exposure-time group the [1:*] slice is an
    ; illegal subscript in IDL
    if (numtimes gt 1) then begin
        num_per_time[1:*] = groupends[1:*] - groupends[0:numtimes - 2]
    endif

    ; now the images in each exposure time group can be processed.
    ; Start by declaring some arrays to hold the values of interest.

    ; avgadu holds median of mean pixel values for each exposure time group
    avgadu = fltarr(numtimes)

    ; sigmavals holds median of pixel standard deviations for each
    ; exposure time group
    sigmavals = fltarr(numtimes)

    ; sigma_adu holds uncertainty in avgadu for each exposure time group
    sigma_adu = fltarr(numtimes)

    ; sigma_sigma holds uncertainties in sigmavals for each exposure time
    ; group
    sigma_sigma = fltarr(numtimes)

    ; index of the 1st image in each exposure time group; adjusted for
    ; each group in the processing loop that follows
    start_index = 0

    ; start processing the exposure time groups
    for groupnumber = 0, numtimes - 1 do begin

        print, "Processing image group ", groupnumber + 1

        ; construct an image cube that will hold CDS-subtracted frames
        ; made from the FITS images in the exposure time group.  The
        ; z-dimension is one less than the number of images in the group
        ; as the 1st image of each group is omitted (see header).  Use a
        ; pointer so the memory can be freed after each loop iteration.
        thecube = ptr_new(fltarr(nxarea, nyarea, $
            num_per_time[groupnumber] - 1))

        ; find names of dark bias frames to use for this group: assume
        ; bias frame has same name up until fourth '_' character, then
        ; followed by 'd#.fits'
        nameparts = strsplit(imginfo[start_index].name, '_', /extract)
        darks = findfile(testdir + strjoin(nameparts[0:2], '_') + $
            '_d*.fits')

        ; newer data sets use a different naming convention for the bias
        ; frames, so check for those filenames if the search above failed
        if ((darks[0] eq '') AND (n_elements(nameparts) ge 5)) then begin
            nameparts[1] = 'Closed'
            nameparts[2] = 'Closed'
            darks = findfile(testdir + strjoin(nameparts[0:4], '_') + $
                '_d*.fits')
        endif

        ; if a dark image was found, read it in and construct a
        ; CDS-subtracted bias frame from it.  If not, create an array of
        ; 0's of the same size for use in the later subtraction step.
        if (darks[0] ne '') then begin
            print, 'Reading ' + darks[n_elements(darks)-1]
            fits_read, darks[n_elements(darks)-1], bias
            bias = bias[*,*,1] - bias[*,*,0]
        endif else begin
            bias = intarr(xsize, ysize)
        endelse

        ; the sorting process isn't guaranteed to preserve the time
        ; sequence of the files within an exposure time group.  We want to
        ; skip the first exposure taken within each group, so find that
        ; exposure in the group and make sure it is first.
        minjday = $
            min(imginfo[start_index:groupends[groupnumber]].jdate, minindex)

        ; minindex is relative to the 1st index of the group, so it must
        ; be adjusted to an absolute index
        minindex = minindex + start_index

        ; if the exposure with minimum Julian date isn't the 1st image in
        ; the group, swap it with the 1st one
        if (minindex ne start_index) then begin
            temp = imginfo[start_index]
            imginfo[start_index] = imginfo[minindex]
            imginfo[minindex] = temp
        endif

        ; skip the 1st image by advancing the group index
        start_index = start_index + 1

        ; begin processing of all images in the group, one at a time
        for img_index = start_index, groupends[groupnumber] do begin

            ; number of iterations of the loop so far
            cube_index = img_index - start_index

            ; read in the CDS image
            print, 'Reading ' + testdir + imginfo[img_index].name
            fits_read, testdir + imginfo[img_index].name, imagetemp, header

            ; Convert int array to long, as we could have enough signal
            ; to overflow an int
            imagetemp = long(imagetemp)

            ; Collapse the CDS image
            imagetemp = imagetemp[*,*,1] - imagetemp[*,*,0]

            ; Subtract bias frame
            imagetemp = imagetemp - bias

            ; Construct a mask that will reveal the location of pixels
            ; with constant signal (i.e., bad pixels).
            if (cube_index eq 0) then begin
                constpixmask = imagetemp - imagetemp
                firstframe = imagetemp
            endif else begin
                ; subtract the current frame from the 1st frame; add 1 to
                ; the mask for each pixel that has the same value as it
                ; did in the 1st frame.
                ; Guards added: the original indexed with the raw result
                ; of where(), which is -1 (the LAST element) when no pixel
                ; matches, silently corrupting one mask element.
                diffmask = imagetemp - firstframe
                samepix = where(diffmask eq 0, nsame, $
                    complement=diffpix, ncomplement=ndiff)
                if (nsame gt 0) then diffmask[samepix] = 1
                if (ndiff gt 0) then diffmask[diffpix] = 0
                constpixmask = constpixmask + diffmask
            endelse

            ; do reference pixel correction on the collapsed,
            ; bias-subtracted image (spatial averaging by default,
            ; maximal averaging if /ma_refcor, none if /noref)
            if (not keyword_set(noref)) then begin
                if (not keyword_set(ma_refcor)) then begin
                    refsub, imagetemp, sca=SCAName, plot=0
                endif else begin
                    refsub, imagetemp, sca=SCAName, plot=0, /simple
                endelse
            endif

            ; put the frame into the cube
            (*thecube)[*,*, cube_index] = $
                imagetemp[xarea[0]:xarea[1]:colinc, $
                    yarea[0]:yarea[1]:rowinc]
        endfor

        ; trim the constant pixel mask to the analysis region
        constpixmask = constpixmask[xarea[0]:xarea[1]:colinc, $
            yarea[0]:yarea[1]:rowinc]

        ; construct arrays to hold pixel-by-pixel mean and standard
        ; deviations of the pixel values from all images in the group
        mean_array = ptr_new(fltarr(nxarea, nyarea))
        stdev_array = ptr_new(fltarr(nxarea, nyarea))

        ; get size of 3rd dimension of the cube
        zsize = (size(*thecube))[3]

        ; Calculate the mean for each pixel
        *mean_array = total(*thecube, 3) / zsize

        ; Calculate the standard deviation for each pixel
        for z = 0, zsize - 1 do begin
            *stdev_array = temporary(*stdev_array) + $
                ((*thecube)[*,*,z] - *mean_array)^2
        endfor
        *stdev_array = sqrt(*stdev_array / (zsize - 1))

        ; Find all pixels that had the same value in all images (mask
        ; value equal to one less than the number of frames in the cube)
        ; and remove them from both stdev and mean arrays.
        constpixmask = where(constpixmask eq zsize - 1, complement=goodpix)

        ; redeclare the mean & stddev arrays to include only the
        ; non-masked pixels
        stdev_array_good = (*stdev_array)[goodpix]
        mean_array_good = (*mean_array)[goodpix]
        ptr_free, stdev_array, mean_array

        ; remove outlying pixels (skip for single-pixel data)
        if (n_elements(mean_array_good) ne 1) then begin
            remoutliers, mean_array_good, mean_array_good, $
                sigclip=sigclip, rejectcyc=rejectcyc
            remoutliers, stdev_array_good, stdev_array_good, $
                sigclip=sigclip, rejectcyc=rejectcyc

            ; calculate statistics for signal and noise (floor the signal
            ; at 1e-4 to keep it positive for later log-space fitting)
            avgadu[groupnumber] = median(mean_array_good) > 1.e-4
            sigmavals[groupnumber] = median(stdev_array_good)
        endif else begin
            ; calculate statistics for signal and noise
            avgadu[groupnumber] = mean_array_good > 1.e-4
            sigmavals[groupnumber] = stdev_array_good
        endelse

        ; calculate uncertainties in the signal value and standard
        ; deviation (divide by the size of the analysis region)
        sigma_adu[groupnumber] = stddev(mean_array_good) / $
            sqrt(long(nxarea) * nyarea)
        sigma_sigma[groupnumber] = $
            stddev(stdev_array_good) / sqrt(long(nxarea) * nyarea)

        ; print statistics
        print, ' Signal & stdev(Signal) = ', avgadu[groupnumber], $
            sigmavals[groupnumber]
        print, ' Errors = ', sigma_adu[groupnumber], $
            sigma_sigma[groupnumber]

        ; free up the dynamic memory used for the current exposure time
        ; group
        ptr_free, thecube

        ; set the start index for the next group for the next iteration
        start_index = img_index
    endfor

    ; The assumption is that all of the images in the file were taken with
    ; the same filter combination in the dewar filter wheels (except for
    ; the first group of images, which were taken with filters closed).
    ; Therefore, the names of the filters can be taken from the last
    ; header that was read in.
    wheel1 = sxpar(header, 'WHEEL1')
    wheel2 = sxpar(header, 'WHEEL2')

    ; If reference pixel correction was done, query the refsub function
    ; for the version number; otherwise just set a flag that it was not
    ; done.
    if (not keyword_set(noref)) then begin
        ; non-empty string necessary for call to refsub when querying for
        ; the version parameter--the version number is returned into it
        ref_ver = ' '
        refsub, 0, sca=' ', version = ref_ver
        if (not keyword_set(ma_refcor)) then begin
            ref_type = 'Spatial averaging'
        endif else begin
            ref_type = 'Maximal averaging'
        endelse
    endif else begin
        ref_ver = 'Not used'
        ref_type = 'N/A'
    endelse

    ; Save fitting data.
    if (not keyword_set(outfile)) then begin
        outfile = wheel1 + '_' + wheel2 + '_' + 'photonxfer_data.txt'
    endif

    ; construct a string version of the region array as
    ; [x1, x2, colinc, y1, y2, rowinc]
    ; (the original pre-allocated region_str = strarr(6) and immediately
    ; overwrote it; the dead assignment has been removed)
    region_str = [strtrim(string(xarea), 2), strtrim(string(colinc), 2), $
        strtrim(string(yarea), 2), strtrim(string(rowinc), 2)]

    ; find the earliest Julian date of all the image files--this will be
    ; written out as the experiment start time
    mindate = min(imginfo.jdate)

    ; convert the start time from Julian back to calendar time values
    caldat, mindate, month, day, year, hour, minute, second

    ; convert the calendar values to strings for output to file
    theDate = string(month, format = '(I2)') + '/' + $
        string(day, format = '(I2)') + '/' + $
        string(year, format = '(I4)')
    theTime = string(hour, format='(I2)') + ':' + $
        string(minute, format = '(I2)') + ':' + $
        string(round(second), format='(I2)')

    ; replace blanks in date and time strings with '0' (zero-pad the
    ; space-padded I2 fields)
    blankpos = strpos(theDate, ' ')
    while (blankpos ne -1) do begin
        strput, theDate, '0', blankpos
        blankpos = strpos(theDate, ' ')
    endwhile
    blankpos = strpos(theTime, ' ')
    while (blankpos ne -1) do begin
        strput, theTime, '0', blankpos
        blankpos = strpos(theTime, ' ')
    endwhile

    ; write a header to the output file containing information that will
    ; be displayed on the plot by photon_analysis.pro
    file_mkdir, outdir
    openw, lun2, outdir + outfile, /get_lun
    printf, lun2, SCAName
    printf, lun2, wheel1
    printf, lun2, wheel2
    printf, lun2, ref_ver
    printf, lun2, ref_type
    printf, lun2, theDate
    printf, lun2, theTime
    printf, lun2, region_str, format = $
        '("[", A, ":", A, ":", A, ", ", A, ":", A, ":", A, "]")'
    if (max(bias) gt 0) then begin
        printf, lun2, 'Bias subtracted: Yes'
    endif else begin
        printf, lun2, 'Bias subtracted: No'
    endelse

    ; write all of the data values to the output file
    for i = 0, n_elements(avgadu) - 1 do begin
        printf, lun2, timevals[i], avgadu[i], sigmavals[i], sigma_adu[i], $
            sigma_sigma[i], format='(5e16.8)'
    endfor
    free_lun, lun2

    ; call the analysis script to either generate PS and JPG files or
    ; print plots to the screen
    if (keyword_set(outps)) then begin
        plotfile = strmid(outfile, 0, strpos(outfile, '.')) + '.ps'
        photon_analysis, outdir, snr_min, /outps, datfile = outfile, $
            outfile = plotfile
    endif else begin
        photon_analysis, outdir, snr_min, datfile = outfile
    endelse
end