#!/usr/bin/env python
# Example function call:
# python skyloopMS.py in=^myFile.lis out='test-skyloop-etgSky.sdf' ref=test-mask.sdf pixsize=4 config=^dimmconfig_skyloop.lis logfile='test-skyloop'
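# Another illustrative example (file names are placeholders): iterate until the
# normalised map change drops below maptol, with at most 20 iterations, and
# retain the intermediate files:
# python skyloopMS2.py in=^myFile.lis out='map.sdf' niter=-20 config=^dimmconfig_skyloop.lis retain=yes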
'''
*+
* Name:
* SKYLOOP
* Purpose:
* Create a map using the "inside-out" algorithm.
* Language:
* python (2.7 or 3.*)
* Description:
* This script makes a map from specified raw time-series data using
* the algorithm described at
* http://pipelinesandarchives.blogspot.co.uk/2012/10/inside-out-map-making.html.
* It runs SMURF:MAKEMAP multiple times, performing a single iteration of
* the Dynamic Iterative Map-Maker algorithm on each invocation,
* including data from all chunks. Each map created by MAKEMAP is used
* as the initial sky estimate for the next invocation. MAKEMAP subtracts
* this initial sky estimate from the time-series data before starting
* the first (and only) iteration, and then adds the initial sky estimate
* back on at the end prior to creating the output map.
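*
*     In outline, the algorithm is (illustrative pseudo-code only; each
*     "makemap" below stands for an invocation of SMURF:MAKEMAP with the
*     configuration changes listed under parameter "CONFIG"):
*
*        map = makemap( raw_data )           # invocation 1: clean the data and
*                                            # export the EXT/LUT/NOI models
*        for i in 2 .. niter:                # later invocations: one iteration,
*           map = makemap( cleaned_data,     # using the previous map as the
*                          ref=map )         # initial sky estimate (importsky=ref)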
*
* The script produces several intermediate files: a set of cleaned
* time-series files that may be 2 to 3 times the size of the entire
* set of raw data files included in the map, and a 2D map for every
* iteration. These files are placed in a newly created directory that
* is normally deleted before the script exits. The files can be retained
* for debugging purposes if required by running the script with
* "retain=yes" on the command line.
*
* The temporary files are placed in a directory named "NDG_xxxxx",
* located within the directory specified by environment variable
* STAR_TEMP. If STAR_TEMP is not defined, they are placed in the system's
* temporary directory (e.g. "/tmp").
*
* In addition, files holding the extinction correction factor for each
* data sample, and files holding the noise model, are written to the
* current working directory. These are deleted when the script ends.
* Usage:
* skyloop in out niter pixsize config [itermap] [ref] [mask2] [mask3]
* [extra] [extra1] [retain] [msg_filter] [ilevel] [glevel] [logfile]
* [restart]
* Parameters:
* CONFIG = LITERAL (Read)
* The MAKEMAP configuration parameter values to use. Additions
* will be made as follows:
*
* - First iteration:
* numiter=1
* noi.export=1
* exportNDF=(lut,ext)
* noexportsetbad=1
* exportclean=1
* ast.zero_notlast = 0
* flt.zero_notlast = 0
* com.zero_notlast = 0
* itermap=0
* shortmap=0
* bolomap=0
* flagmap=<undef>
* sampcube=0
* diag.append=0
*
* - Subsequent iterations:
* numiter=1
* noi.import=1
* doclean=0
* importsky=ref
* importlut=1
* ext.import=1
* ast.zero_notlast = 0
* flt.zero_notlast = 0
* com.zero_notlast = 0
* flt.notfirst = 0
* pln.notfirst = 0
* smo.notfirst = 0
* itermap=0
* shortmap=0
* bolomap=0
* flagmap=<undef>
* sampcube=0
* diag.append=1
* downsampscale=0
* downsampfreq=0
* fakemap=<undef>
*
* - Last iteration:
* numiter=1
* noi.import=1
* doclean=0
* importsky=ref
* importlut=1
* ext.import=1
* ast.zero_notlast = 1
* flt.zero_notlast = 1
* com.zero_notlast = 1
* flt.notfirst = 0
* pln.notfirst = 0
* smo.notfirst = 0
* itermap=0
* shortmap=0
* bolomap=0
* flagmap=<undef>
* sampcube=0
* diag.append=1
* downsampscale=0
* downsampfreq=0
* fakemap=<undef>
*
* GLEVEL = LITERAL (Read)
* Controls the level of information to write to a text log file.
* Allowed values are as for "ILEVEL". The log file to create is
* specified via parameter "LOGFILE". ["ATASK"]
* ILEVEL = LITERAL (Read)
* Controls the level of information displayed on the screen by the
* script. It can take any of the following values (note, these values
* are purposefully different to the SUN/104 values to avoid confusion
* in their effects):
*
* - "NONE": No screen output is created
*
* - "CRITICAL": Only critical messages, such as warnings, are displayed.
*
* - "PROGRESS": Extra messages indicating script progress are also
* displayed.
*
* - "ATASK": Extra messages are also displayed describing each atask
* invocation. Lines starting with ">>>" indicate the command name
* and parameter values, and subsequent lines hold the screen output
* generated by the command.
*
* - "DEBUG": Extra messages are also displayed containing unspecified
* debugging information.
*
* ["PROGRESS"]
* IN = NDF (Read)
* The group of time series NDFs to include in the output map.
* ITERMAP = NDF (Write)
* A 3D NDF to create holding the maps from all iterations. [!]
* LOGFILE = LITERAL (Read)
* The name of the log file to create if GLEVEL is not NONE. The
* default is "<command>.log", where <command> is the name of the
* executing script (minus any trailing ".py" suffix), and will be
* created in the current directory. Any file with the same name is
* over-written. []
* NITER = _INTEGER (Read)
* The number of iterations to perform. A positive value specifies
* a fixed number of iterations to perform. A negative value
* indicates that iterations should continue until the normalized
* change in the map between iterations is less than the value of
* the "maptol" parameter in the configuration supplied by
* parameter CONFIG (a maptol value of 0.05 is used if CONFIG does
* not specify maptol). If a value of zero is supplied for NITER,
* the value used will be read from the "numiter" parameter in the
* configuration. [0]
* MASK2 = NDF (Read)
* An existing NDF that can be used to specify a second external mask
* for use with either the AST, FLT or COM model. See configuration
* parameters AST.ZERO_MASK, FLT.ZERO_MASK and COM.ZERO_MASK. Note,
* it is assumed that this image is aligned in pixel coordinates with
* the output map. [!]
* MASK3 = NDF (Read)
* An existing NDF that can be used to specify a third external mask
* for use with either the AST, FLT or COM model. See configuration
* parameters AST.ZERO_MASK, FLT.ZERO_MASK and COM.ZERO_MASK. Note,
* it is assumed that this image is aligned in pixel coordinates with
* the output map. [!]
* EXTRA = LITERAL (Read)
* A string holding any extra command line options to be passed to
* MAKEMAP (all invocations). [!]
* EXTRA1 = LITERAL (Read)
* A string holding any extra command line options to be passed to
* MAKEMAP for first iteration only. [!]
* MSG_FILTER = LITERAL (Read)
* Controls the default level of information reported by Starlink
* atasks invoked within the executing script. The accepted values
* are the list defined in SUN/104 ("None", "Quiet", "Normal",
* "Verbose", etc). ["Normal"]
* OUT = NDF (Write)
* The NDF holding the output map.
* PIXSIZE = _REAL (Read)
* Pixel dimensions in the output image, in arcsec. The same value
* will be used for both axes. The default depends on the wavelength
* of the input data. []
* REF = NDF (Read)
* An existing NDF that is to be used to define the output grid.
* If supplied, the output grid will be aligned with the supplied
* reference NDF. The reference can be either 2D or 3D and the spatial
* frame will be extracted. If a null (!) value is supplied then the
* output grid is determined by parameters REFLON, REFLAT, etc.
* In addition, this NDF can be used to mask the AST, FLT or COM
* model. See configuration parameters AST.ZERO_MASK, FLT.ZERO_MASK
* and COM.ZERO_MASK.
*
* On the second and subsequent invocations of MAKEMAP, any
* supplied REF image is replaced by the map created by the previous
* invocation of MAKEMAP. [!]
* RESTART = LITERAL (Read)
* If a value is assigned to this parameter, it should be the path
* to a directory containing the intermediate files created by a
* previous run of SKYLOOP. If supplied, execution of skyloop will
* restart from the point where the previous run finished. This is
* useful for continuing runs that have been interrupted accidentally.
* The path to the intermediate files can be found by examining the
* log file created by the previous run. [!]
* RETAIN = _LOGICAL (Read)
* Should the temporary directory containing the intermediate files
* created by this script be retained? If not, it will be deleted
* before the script exits. If retained, a message will be
* displayed at the end specifying the path to the directory. [FALSE]
* Copyright:
* Copyright (C) 2012 Science & Technology Facilities Council.
* All Rights Reserved.
* Licence:
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either Version 2 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be
* useful, but WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
* PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
* Authors:
* DSB: David S. Berry (JAC, Hawaii)
* {enter_new_authors_here}
* History:
* 24-OCT-2012 (DSB):
* Original version
* 16-JAN-2013 (DSB):
* - Only include any supplied REF value in the makemap command line
* on the first iteration.
* - Record quality info in the final map.
* 9-JAN-2013 (DSB):
* Add support for diagnostics.
* 28-MAR-2013 (DSB):
* Added parameter RESTART.
* 1-JUL-2013 (DSB):
* Do not export cleaned data on the first iteration if the supplied
* data has already been cleaned. Instead, re-use the supplied data on
* subsequent iterations.
* 10-JUL-2013 (DSB):
* Add support for ast.skip parameter.
* 9-SEP-2013 (DSB):
* Add support for "..._last" parameters.
* 27-SEP-2013 (DSB):
* Changed from using noi.calcfirst=1 to noi.calcfirst=0. The NOI
* model is now calculated after the first iteration is completed,
* and is exported for use on subsequent iterations. Previously, it
* was calculated afresh on every iteration, before the iteration
* commenced. This means that it is now possible for skyloop to
* recognise and use the noi.box_size parameter. But first results
* suggest that the noise values are less stable without calcfirst
* set, causing some bolometers to be given inappropriately small
* variances, and thus be over-emphasised in the final map,
* resulting in visible bolometer tracks. If this is a problem, add
* "noi.calcfirst=1" to your config., and remove "noi.box_size".
* 9-DEC-2013 (DSB):
* - Fix nasty bug which caused the raw (i.e. uncleaned) data to be
* used on every iteration, even though "doclean=0" was used on the
* second and subsequent iteration, thus causing the map to be
* formed from uncleaned data.
* - Ensure only one iteration is used on the second and subsequent
* invocations of makemap, even if ast.skip is non-zero.
* 8-JAN-2014 (DSB):
* - Fix bug that caused NOI model to be ignored on all iterations.
* - Update quality flags in cleaned data after each invocation of makemap.
* - Cache LUT model values.
* 14-JAN-2014 (DSB):
* Ensure same map bounds are used on every invocation of makemap.
* 14-FEB-2014 (DSB):
* Ensure downsampling occurs only on the first invocation of makemap.
* 4-MAR-2014 (DSB):
* Do not update quality flags at the end of each iteration.
* 14-MAY-2014 (DSB):
* Abort if ast.skip is negative.
* 11-JUN-2015 (DSB):
* Only add on any fakemap on the first iteration.
*-
'''
import sys
sys.path.append('/star/bin/smurf')
import glob
import os
import shutil
import starutil
from starutil import invoke
from starutil import NDG
from starutil import msg_out
def inputAdjuster(cleanFiles, outname):
# load in list of clean files
obsfiles = {}
filein = open(cleanFiles,'r')
for line in filein.readlines():
if line.count("/s8") > 0:
info = line.split("/s8")
elif line.count("/s4") > 0:
info = line.split("/s4")
else:
raise Exception("File band not found")
obsid = info[1][1:15]
if obsid in obsfiles:
obsfiles[obsid].append(line)
else:
obsfiles[obsid] = [line]
filein.close()
# save an individual file list for each observation
obsids = list(obsfiles.keys())
nparts = len(obsids)
cleanParts = []
for i in range(0,nparts):
outCleanName = cleanFiles[:-4] + "-part" + str(i) + ".lis"
cleanParts.append("^'"+outCleanName+"'")
fileout = open(outCleanName,'w')
for j in range(0,len(obsfiles[obsids[i]])):
fileout.write(obsfiles[obsids[i]][j])
fileout.close()
outParts = []
for i in range(0,nparts):
outParts.append("'"+outname + "-part" + str(i) + "-toMOS'")
# get folder
position = 0
for i in range(0,len(outname)):
if outname[i] == "/":
position = i
folder = outname[0:position+1]
return nparts, cleanParts, outParts, folder
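# Descriptive note on inputAdjuster (the example path below is an assumption,
# not taken from real data): given a text file listing cleaned time-series
# NDFs with paths such as ".../s8a20130401_00042_0002_con_res_cln.sdf", it
# groups the files by the 14-character observation identifier that follows
# the "/s8" or "/s4" band marker (e.g. "20130401_00042"), writes one
# "<listname>-partN.lis" file per observation, and returns the number of
# observations, the "^list" input specifiers for MAKEMAP, the per-observation
# output map names (ending "-toMOS") that are mosaicked later, and the
# directory containing the output map.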
# Assume for the moment that we will not be retaining temporary files.
retain = 0
# A list of the extinction correction NDFs created by this script. These
# are created and used in the current working directory, and are deleted
# when the script exits.
new_ext_ndfs = []
# A list of the LUT NDFs created by this script. These are created and used
# in the current working directory, and are deleted when the script exits.
new_lut_ndfs = []
# A list of the NOI model NDFs created by this script. These are created
# and used in the current working directory, and are deleted when the
# script exits.
new_noi_ndfs = []
# A function to clean up before exiting. Delete all temporary NDFs etc,
# unless the script's RETAIN parameter indicates that they are to be
# retained. Also delete the script's temporary ADAM directory.
def cleanup():
global retain, new_ext_ndfs, new_lut_ndfs, new_noi_ndfs
try:
starutil.ParSys.cleanup()
if retain:
msg_out( "Retaining EXT, LUT and NOI models in {0} and temporary files in {1}".format(os.getcwd(),NDG.tempdir))
else:
NDG.cleanup()
for ext in new_ext_ndfs:
os.remove( ext )
for lut in new_lut_ndfs:
os.remove( lut )
for noi in new_noi_ndfs:
os.remove( noi )
except:
pass
# Catch any exception so that we can always clean up, even if control-C
# is pressed.
try:
# Declare the script parameters. Their positions in this list define
# their expected position on the script command line. They can also be
# specified by keyword on the command line. If no value is supplied on
# the command line, the user is prompted for a value when the parameter
# value is first accessed within this script. The parameters "MSG_FILTER",
# "ILEVEL", "GLEVEL" and "LOGFILE" are added automatically by the ParSys
# constructor.
params = []
params.append(starutil.ParNDG("IN", "The input time series NDFs",
starutil.get_task_par("DATA_ARRAY",
"GLOBAL",
default=starutil.Parameter.UNSET)))
params.append(starutil.ParNDG("OUT", "The output map", default=None,
exists=False, minsize=0, maxsize=1 ))
params.append(starutil.Par0I("NITER", "No. of iterations to perform",
0, noprompt=True))
params.append(starutil.Par0F("PIXSIZE", "Pixel size (arcsec)", None,
maxval=1000, minval=0.01))
params.append(starutil.Par0S("CONFIG", "Map-maker tuning parameters",
"^$STARLINK_DIR/share/smurf/dimmconfig.lis"))
params.append(starutil.ParNDG("ITERMAP", "Output cube holding itermaps",
default=None, exists=False, minsize=0,
maxsize=1, noprompt=True ))
params.append(starutil.ParNDG("REF", "The reference NDF", default=None,
minsize=0, maxsize=1, noprompt=True ))
params.append(starutil.ParNDG("MASK2", "The second mask NDF", default=None,
minsize=0, maxsize=1, noprompt=True ))
params.append(starutil.ParNDG("MASK3", "The third mask NDF", default=None,
minsize=0, maxsize=1, noprompt=True ))
params.append(starutil.Par0S("EXTRA", "Extra command-line options for MAKEMAP",
default=None, noprompt=True ))
params.append(starutil.Par0S("EXTRA1", "Extra command-line options for MAKEMAP first iteration",
default=None, noprompt=True ))
params.append(starutil.Par0L("RETAIN", "Retain temporary files?", False,
noprompt=True))
params.append(starutil.Par0S("RESTART", "Directory holding data from an interrupted run of skyloop",
default=None, noprompt=True ))
# Initialise the parameters to hold any values supplied on the command
# line. This automatically adds definitions for the additional parameters
# "MSG_FILTER", "ILEVEL", "GLEVEL" and "LOGFILE".
parsys = starutil.ParSys( params )
# It's a good idea to get parameter values early if possible, in case
# the user goes off for a coffee whilst the script is running and does not
# see a later parameter prompt or error.
restart = parsys["RESTART"].value
if restart is None:
retain = parsys["RETAIN"].value
else:
retain = True
NDG.tempdir = restart
NDG.overwrite = True
msg_out( "Re-starting using data in {0}".format(restart) )
indata = parsys["IN"].value
outdata = parsys["OUT"].value
niter = parsys["NITER"].value
pixsize = parsys["PIXSIZE"].value
config = parsys["CONFIG"].value
ref = parsys["REF"].value
mask2 = parsys["MASK2"].value
mask3 = parsys["MASK3"].value
extra = parsys["EXTRA"].value
extra1 = parsys["EXTRA1"].value
itermap = parsys["ITERMAP"].value
# See if we are using pre-cleaned data, in which case there is no need
# to export the cleaned data on the first iteration. Note we need to
# convert the string returned by "invoke" to an int explicitly, otherwise
# the equality is never satisfied and we end up assuming that the raw
# data has been precleaned, even if it hasn't been precleaned.
if int( invoke( "$KAPPA_DIR/configecho name=doclean config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=1".format(config))) == 1:
precleaned = False
else:
precleaned = True
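# (KAPPA:configecho simply echoes the value that the named configuration
# parameter would take given the supplied config and the makemap defaults;
# a "doclean" value of 1 means makemap would clean the data itself, i.e. the
# supplied time-series data have not been pre-cleaned.)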
# If requested, use numiter from the config file. Arbitrarily choose 850 um
# values for the waveband-specific parameters, but these are not actually used.
if niter == 0:
niter = int( invoke( "$KAPPA_DIR/configecho name=numiter config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=5".format(config)))
# If iterating to convergence, get the maximum allowed normalised map
# change between iterations, and set the number of iterations positive.
if niter < 0:
niter = -niter
maptol = float( invoke( "$KAPPA_DIR/configecho name=maptol config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=0.05".format(config)))
else:
maptol = 0
converged = False
# Determine the value of the (AST,COM,FLT).ZERO_NITER, ZERO_NOTLAST and
# ZERO_FREEZE parameters in the supplied config. We need to ensure that
# appropriate changes are made to the values of these on each invocation
# of makemap.
zero_niter = {}
zero_notlast = {}
zero_freeze = {}
for model in ["ast", "com", "flt"]:
zero_niter[model] = int( invoke( "$KAPPA_DIR/configecho name={0}.zero_niter config={1} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\"".format(model,config)))
zero_notlast[model] = int( invoke( "$KAPPA_DIR/configecho name={0}.zero_notlast config={1} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\"".format(model,config)))
zero_freeze[model] = int( invoke( "$KAPPA_DIR/configecho name={0}.zero_freeze config={1} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\"".format(model,config)))
# Similarly, we need to record com.freeze_flags.
com_freeze_flags = int( invoke( "$KAPPA_DIR/configecho name=com.freeze_flags config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\"".format(config)))
# Save parameter values to be used on the last iteration (-1.0 if unset)
filt_edge_largescale_last = float( invoke( "$KAPPA_DIR/configecho "
"name=flt.filt_edge_largescale_last config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=-1.0".format(config)))
filt_edge_smallscale_last = float( invoke( "$KAPPA_DIR/configecho "
"name=flt.filt_edge_smallscale_last config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=-1.0".format(config)))
filt_edgehigh_last = float( invoke( "$KAPPA_DIR/configecho "
"name=flt.filt_edgehigh_last config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=-1.0".format(config)))
filt_edgelow_last = float( invoke( "$KAPPA_DIR/configecho "
"name=flt.filt_edgelow_last config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=-1.0".format(config)))
flt_whiten_last = int( invoke( "$KAPPA_DIR/configecho "
"name=flt.whiten_last config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=-1".format(config)))
com_perarray_last = int( invoke( "$KAPPA_DIR/configecho "
"name=com.perarray_last config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=-1".format(config)))
# Get the number of iterations for which no AST model should be used.
ast_skip = int( invoke( "$KAPPA_DIR/configecho name=ast.skip config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\"".format(config)))
if ast_skip < 0 :
msg_out("\nThe ast.skip parameter is set to {0} in the supplied "
"config. skyloop does not handle negative ast.skip "
"values. Use makemap instead (there is no benefit "
"in a skyloop-style algorithm since no AST model "
"is used).".format(ast_skip))
cleanup()
sys.exit()
# See if low frequency changes are to be removed from the map on each
# iteration.
ast_filt_diff = float( invoke( "$KAPPA_DIR/configecho "
"name=ast.filt_diff config={0} "
"defaults=$SMURF_DIR/smurf_makemap.def "
"select=\"\'450=0,850=1\'\" defval=0.0".format(config)))
# The first invocation of makemap will create NDFs holding cleaned
# time-series data, EXT, LUT and NOI model values. The NDFs are created
# with hard-wired names and put in the current working directory. For
# tidyness, we will move the cleaned data files into the NDG temp
# directory, where all the other temp files are stored. In order to
# distinguish NDFs created by this script from any pre-existing NDFs
# (which we do not want to move), we now record the paths and
# last-accessed times of any relevant pre-existing NDFs. Note, if the
# "ext.import" config parameter is set, makemap expects EXT model
# values to be in the current working directory, so we do not move
# those NDFs to the NDG temp directory. Likewise for LUT and NOI model
# files. Use last-accessed times rather than inode numbers since something
# very strange seems to be happening with inode numbers for NDFs
# created by the starutil module (two successive NDFs with the same
# path can have the same inode number).
orig_cln_ndfs = {}
for path in glob.glob("s*_con_res_cln.sdf"):
orig_cln_ndfs[path] = os.stat(path).st_atime
# Note any pre-existing NDFs holding extinction values.
orig_ext_ndfs = {}
for path in glob.glob("s*_con_ext.sdf"):
orig_ext_ndfs[path] = os.stat(path).st_atime
# Note any pre-existing NDFs holding NOI values.
orig_noi_ndfs = {}
for path in glob.glob("s*_con_noi.sdf"):
orig_noi_ndfs[path] = os.stat(path).st_atime
# Note any pre-existing NDFs holding LUT values.
orig_lut_ndfs = {}
for path in glob.glob("s*_con_lut.sdf"):
orig_lut_ndfs[path] = os.stat(path).st_atime
# Find the number of iterations to perform on the initial invocation of
# makemap.
niter0 = 1 + ast_skip
if niter0 > niter:
niter0 = niter
# On the first invocation of makemap, we use the raw data files specified
# by the IN parameter to create an initial estimate of the sky. We also
# save the cleaned time series data, and the EXT, LUT and NOI models (if we
# are doing more than one iteration), for use on subsequent iterations (this
# speeds them up a bit). First create a text file holding a suitably modified
# set of configuration parameters. This file is put in the NDG temp
# directory (which is where we store all temp files).
conf0 = os.path.join(NDG.tempdir,"conf0") # Full path to new config file
fd = open(conf0,"w") # Open the new config file.
fd.write("{0}\n".format(config)) # Inherit the supplied config parameter values.
fd.write("numiter={0}\n".format(niter0)) # MAKEMAP should do only one
# iteration (plus any skipped iterations).
fd.write("itermap=0\n") # Itermaps don't make sense
fd.write("bolomap=0\n") # Bolomaps don't make sense
fd.write("shortmap=0\n") # Shortmaps don't make sense
fd.write("flagmap=<undef>\n")# Flagmaps don't make sense
fd.write("sampcube=0\n") # Sampcubes don't make sense
if niter > 1:
fd.write("noi.export=1\n") # Export the NOI model. This forces the
# NOI model to be created and exported after
# the first iteration has completed.
fd.write("exportNDF=(lut,ext)\n")# Save the EXT, LUT model values to avoid
# re-calculation on each invocation of makemap.
fd.write("noexportsetbad=1\n")# Export good EXT values for bad bolometers
if not precleaned:
fd.write("exportclean=1\n") # Likewise save the cleaned time-series data.
fd.write("ast.zero_notlast = 0\n") # Masking is normally not performed
fd.write("flt.zero_notlast = 0\n") # on the last iteration. But the first
fd.write("com.zero_notlast = 0\n") # iteration is also the last iteration
# in our case, so force any enabled
# masking to be performed on the last iteration.
fd.write("diag.append = 0\n") # Ensure a new diagnostics file is started
fd.write("flt.filt_edge_largescale_last=<undef>\n") # Ensure these parameter
fd.write("flt.filt_edge_smallscale_last=<undef>\n") # are only used on the
fd.write("flt.filt_edgehigh_last=<undef>\n") # final iteration. We
fd.write("flt.filt_edgelow_last=<undef>\n") # reset them here in
fd.write("flt.whiten_last=<undef>\n") # case they are set in
fd.write("com.perarray_last=<undef>\n") # the supplied config.
if precleaned:
fd.write("downsampscale = 0\n") # Cleaned data will have been downsampled already.
fd.write("downsampfreq = 0\n")
fd.close() # Close the config file.
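# As an illustration only (exact contents depend on the supplied CONFIG and
# NITER, and on whether the data were pre-cleaned), a typical "conf0" written
# above might read:
#
#    ^/path/to/user/dimmconfig.lis     (the supplied CONFIG value; placeholder path)
#    numiter=1
#    itermap=0
#    bolomap=0
#    shortmap=0
#    flagmap=<undef>
#    sampcube=0
#    noi.export=1
#    exportNDF=(lut,ext)
#    noexportsetbad=1
#    exportclean=1
#    ast.zero_notlast = 0
#    flt.zero_notlast = 0
#    com.zero_notlast = 0
#    diag.append = 0
#    ...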
# Get the name of a temporary NDF that can be used to store the first
# iteration map. This NDF is put in the NDG temp directory. If we are
# only doing one iteration, use the supplied output NDF name.
if niter == 1:
newmap = outdata
else:
newmap = NDG(1)
prevmap = None
# Start a list of these maps in case we are creating an output itermap cube.
maps = []
maps.append(newmap)
# If we are restarting, check if the NDF already exists and is readable.
# If so, we do not re-create it.
msg_out( "Iteration 1...")
gotit = False
if restart is not None:
try:
invoke("$KAPPA_DIR/ndftrace ndf={0} quiet=yes".format(newmap))
msg_out( "Re-using existing map {0}".format(newmap) )
gotit = True
# Get the pixel index bounds of the map.
lx = starutil.get_task_par( "lbound(1)", "ndftrace" )
ly = starutil.get_task_par( "lbound(2)", "ndftrace" )
ux = starutil.get_task_par( "ubound(1)", "ndftrace" )
uy = starutil.get_task_par( "ubound(2)", "ndftrace" )
except:
pass
# If required, construct the text of the makemap command and invoke it.
if not gotit:
cmd = "$SMURF_DIR/makemap in={0} out={1} method=iter config='^{2}'".format(indata,newmap,conf0)
if pixsize:
cmd += " pixsize={0}".format(pixsize)
if ref:
cmd += " ref={0}".format(ref)
if mask2:
cmd += " mask2={0}".format(mask2)
if mask3:
cmd += " mask3={0}".format(mask3)
if extra:
cmd += " "+extra
if extra1:
cmd += " "+extra1
invoke(cmd)
# Get the pixel index bounds of the map.
lx = starutil.get_task_par( "lbound(1)", "makemap" )
ly = starutil.get_task_par( "lbound(2)", "makemap" )
ux = starutil.get_task_par( "ubound(1)", "makemap" )
uy = starutil.get_task_par( "ubound(2)", "makemap" )
# Unless the supplied data was pre-cleaned, the NDFs holding the cleaned
# time-series data will have been created by makemap in the current working
# directory. Move them to the NDG temporary directory. Avoid moving any
# other files that have similar names by checking each file's last-accessed
# time against the times recorded before makemap was run.
if niter > 1:
if not precleaned:
for ndf in glob.glob("s*_con_res_cln.sdf"):
if not ndf in orig_cln_ndfs:
shutil.move( ndf, NDG.tempdir )
elif os.stat(ndf).st_atime > orig_cln_ndfs[ndf]:
shutil.move( ndf, NDG.tempdir )
# Get a list of the extinction correction files created by the first
# invocation of makemap.
for ndf in glob.glob("s*_con_ext.sdf"):
if not ndf in orig_ext_ndfs:
new_ext_ndfs.append(ndf)
elif os.stat(ndf).st_atime > orig_ext_ndfs[ndf]:
new_ext_ndfs.append(ndf)
# Get a list of the LUT files created by the first invocation of makemap.
for ndf in glob.glob("s*_con_lut.sdf"):
if not ndf in orig_lut_ndfs:
new_lut_ndfs.append(ndf)
elif os.stat(ndf).st_atime > orig_lut_ndfs[ndf]:
new_lut_ndfs.append(ndf)
# Get a list of the NOI model files created by the first invocation of
# makemap.
for ndf in glob.glob("s*_con_noi.sdf"):
if not ndf in orig_noi_ndfs:
new_noi_ndfs.append(ndf)
elif os.stat(ndf).st_atime > orig_noi_ndfs[ndf]:
new_noi_ndfs.append(ndf)
# Get the paths to the moved cleaned files.
if niter > 1:
if not precleaned:
cleaned = NDG( os.path.join( NDG.tempdir,"s*_con_res_cln.sdf"))
else:
cleaned = indata
# Now do the second and subsequent iterations. These use the cleaned
# time-series data created by the first iteration as their time-series
# input, and use the output map from the previous iteration as their
# initial guess at the sky. First create a map holding things to add
# to the config for subsequent invocations.
add = {}
add["exportNDF"] = 0 # Prevent EXT or LUT model being exported.
add["exportclean"] = 0 # Prevent cleaned time-series data being exported.
add["doclean"] = 0 # Do not clean the supplied data (it has already been cleaned).
add["importsky"] = "ref" # Get the initial sky estimate from the REF parameter.
add["importlut"] = 1 # Import the LUT model created by the first iteration.
add["ext.import"] = 1 # Import the EXT model created by the first iteration.
add["flt.notfirst"] = 0 # Ensure we use FLT on 2nd and subsequent invocations
add["pln.notfirst"] = 0 # Ensure we use PLN on 2nd and subsequent invocations
add["smo.notfirst"] = 0 # Ensure we use SMO on 2nd and subsequent invocations
add["diag.append"] = 1 # Ensure we append diagnostics to the file
# created on the first iteration.
add["ast.skip"] = 0 # Ensure we do not skip any more AST models
add["noi.import"] = 1 # Use the NOI model created by iteration 1
add["noi.export"] = 0 # No need to export the NOI model again
if ast_skip > 0:
add["numiter"] = 1 # First invocation used (1+ast_skip) iterations
add["downsampscale"] = 0 # Iter. 1 did any required downsampling. Later iters
add["downsampfreq"] = 0 # must not downsample further because the cached files
# are only appropriate for the original downsampling.
add["fakemap"] = "<undef>" # Iter. 1 added any required fakemap.
# Now create the config, inheriting the config from the first invocation.
iconf = 1
confname = os.path.join(NDG.tempdir,"conf1")
fd = open(confname,"w")
fd.write("^{0}\n".format(conf0))# Inherit the first iteration config.
for key in add:
fd.write("{0}={1}\n".format( key, add[key] ))
fd.close()
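# (Illustratively, "conf1" therefore holds a first line "^<NDG tempdir>/conf0"
# followed by the overrides above, e.g. "doclean=0", "importsky=ref",
# "importlut=1", "ext.import=1", "noi.import=1", "downsampscale=0", etc.)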
# Indicate we do not need to create a new config file yet.
newcon = 0
# Get the name of an NDF in which to store the normalized map change
# after each iteration.
mapchange = NDG(1)
# Now do the second (assuming none have been skipped) and subsequent
# iterations.
if ast_skip > 0:
msg_out( "Skipping {0} iterations since ast.skip is set to {0}".format(ast_skip))
iter = niter0 + 1
while iter <= niter:
msg_out( "Iteration {0}...".format(iter))
# On this iteration we will want to use the output map from the previous
# iteration as the initial guess at the sky. So copy the new map name over
# to the "prevmap" variable.
prevmap = newmap
# When "zero_niter" invocations have been performed, switch off zero
# masking (so long as zero_niter > 0). Do this for AST, COM and FLT
# models.
for model in ["ast", "com", "flt"]:
if zero_niter[model] > 0 and iter > zero_niter[model]:
zero_niter[model] = 0
add[ model+".zero_niter" ] = -1
newcon = 1
# When "zero_freeze" invocations have been performed, freeze the
# mask (so long as zero_freeze > 0). Do this for AST, COM and FLT models.
for model in ["ast", "com", "flt"]:
if zero_freeze[model] > 0 and iter > zero_freeze[model] + 1:
zero_freeze[model] = 0
add[ model+".zero_freeze" ] = -1
newcon = 1
# When "com_freeze_flags" invocations have been performed, freeze the
# COM flags (so long as com_freeze_flags > 0).
if com_freeze_flags > 0 and iter > com_freeze_flags + 1:
com_freeze_flags = 0
add[ "com.freeze_flags" ] = -1
newcon = 1
# If this is the last iteration, put the output map in the NDF specified
# by the script's "OUT" parameter.
if iter == niter:
newmap = outdata
# Also, if this is the last iteration, create a modified configuration file
# that suppresses masking (unless the xxx.zero_notlast value in the
# supplied config indicates otherwise).
for model in ["ast", "com", "flt"]:
if zero_notlast[model] != 0:
add[model + ".zero_notlast"] = 1
newcon = 1
# Also, if this is the last iteration, do not remove low frequency
# changes from the map.
if ast_filt_diff != 0.0:
add["ast.filt_diff"] = 0.0
newcon = 1
# Also override the normal values for parameters that have a
# corresponding "_last" value.
if filt_edge_largescale_last != -1.0:
add["flt.filt_edge_largescale_last"] = filt_edge_largescale_last
newcon = 1
if filt_edge_smallscale_last != -1.0:
add["flt.filt_edge_smallscale_last"] = filt_edge_smallscale_last
newcon = 1
if filt_edgehigh_last != -1.0:
add["flt.filt_edgehigh_last"] = filt_edgehigh_last
newcon = 1
if filt_edgelow_last != -1.0:
add["flt.filt_edgelow_last"] = filt_edgelow_last
newcon = 1
if flt_whiten_last != -1:
add["flt.whiten_last"] = flt_whiten_last
newcon = 1
if com_perarray_last != -1:
add["com.perarray_last"] = com_perarray_last
newcon = 1
# If this is not the last iteration, get the name of a temporary NDF that
# can be used to store the current iteration's map. This NDF is put in
# the NDG temp directory.
else:
newmap = NDG(1)
# If required, create a new config file.
if newcon:
newcon = 0
iconf += 1
confname = os.path.join(NDG.tempdir,"conf{0}".format(iconf))
fd = open(confname,"w")
fd.write("^{0}\n".format(conf0))# Inherit the first iteration config.
for key in add:
fd.write("{0}={1}\n".format( key, add[key] ))
fd.close()
# See if the output NDF already exists.
gotit = False
if restart is not None:
try:
invoke("$KAPPA_DIR/ndftrace ndf={0} quiet=yes".format(newmap))
msg_out( "Re-using existing map {0}".format(newmap) )
gotit = True
except:
pass
# If required, construct the text of the makemap command and invoke it. We
# specify the map from the previous iteration as the REF image. Since we are
# re-using the LUT model from the first invocation, we need to ensure that
# the maps bounds never change (as they may because of new data being
# flagged for instance). So specify them explicitly when running makemap.
if not gotit:
# put into different observations
if converged:
nparts, cleanParts, outParts, outFolder = inputAdjuster(str(cleaned)[2:-1], outFolder+ str(newmap)[1:-5])
else:
nparts, cleanParts, outParts, outFolder = inputAdjuster(str(cleaned)[2:-1], str(newmap)[1:-1])
for i in range(0,nparts):
cmd = "$SMURF_DIR/makemap in={0} out={1} method=iter config='^{2}' ref={3} lbnd=\[{4},{5}\] ubnd=\[{6},{7}\]".format(cleanParts[i],outParts[i],confname,prevmap,lx,ly,ux,uy)
if pixsize:
cmd += " pixsize={0}".format(pixsize)
if mask2:
cmd += " mask2={0}".format(mask2)
if mask3:
cmd += " mask3={0}".format(mask3)
if extra:
cmd += " "+extra
invoke(cmd)
# Mosaic the per-observation maps together using the PICARD recipe MOSAIC_JCMT_IMAGES.
cwd = os.getcwd()
os.chdir(outFolder)
cmd = '${ORAC_DIR}/etc/picard_start.sh -log sf -nodisplay MOSAIC_JCMT_IMAGES '+ outFolder +'*toMOS.sdf'
invoke(cmd)
if converged:
os.system('mv '+ outFolder + '*_mos.sdf ' + str(newmap)[1:-1] + ".sdf")
else:
os.system('mv '+ outFolder + '*_mos.sdf ' + outFolder+str(newmap)[1:-1] + ".sdf")
os.system('rm ' + outFolder + '*part*')
os.chdir(cwd)
# The quality array in the new map will not be of much use since it will
# have been created on the basis of maps made from individual chunks, rather
# than the total coadded map. This would cause the following estimation
# of the normalised change to be wrong. So we copy the quality mask from
# the previous map to the new map, and use that instead (this mask was
# created when the previous map was read into makemap). This also helps
# if the mask is frozen by one of the xxx.zero_freeze config parameters.
if prevmap is not None:
try:
invoke("$KAPPA_DIR/setqual ndf={0} like={1}".format(newmap,prevmap) )
except starutil.StarUtilError as err:
pass
# If required, get the mean normalised map change, and see if it has
# dropped below maptol. If so, we must do one further iteration to
# ensure that the masking is not visible in the final map.
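# (Concretely, the change image computed below holds
# |newmap - prevmap| / sqrt(variance of newmap) at each pixel, and its
# 3-sigma-clipped mean is compared against maptol.)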
if not converged:
invoke("$KAPPA_DIR/setbb ndf={0} bb=1".format(newmap) )
invoke("$KAPPA_DIR/maths exp=\"'abs(ia-ib)/sqrt(va)'\" ia={0} "
"ib={1} out={2}".format(newmap,prevmap,mapchange))
invoke("$KAPPA_DIR/setbb ndf={0} bb=0".format(newmap) )
invoke("$KAPPA_DIR/stats ndf={0} clip=\\[3,3,3\\] quiet".format(mapchange))
meanchange = starutil.get_task_par( "mean", "stats" )
if maptol > 0.0 and not converged:
msg_out( "Normalised mean change in map = {0} (maptol="
"{1})".format(meanchange,maptol) )
if meanchange <= maptol:
msg_out( "Converged! But we need to do one more iteration..." )
converged = True
niter = iter + 1
else:
msg_out( "Normalised mean change in map = {0}".format(meanchange) )
# Append the output map name to the list of maps to be included in any
# itermap cube.
maps.append(newmap)
# Increment the iteration number
iter += 1
# Report convergence failure.
if maptol > 0.0 and not converged:
msg_out("Map did not converge.")
# Now we have done all iterations, create the output itermap cube if
# required.
if itermap and niter > 1:
msg_out( "Creating output itermap cube {0}...".format(itermap) )
inputs = NDG( maps )
invoke("$KAPPA_DIR/paste in={0} out={1} shift=\[0,0,1\]".format(inputs,itermap) )
# Remove temporary files.