program glowdriver
! This software is part of the GLOW model. Use is governed by the Open Source
! Academic Research License Agreement contained in the file glowlicense.txt.
! For more information see the file glow.txt.
! Version 0.981, 6/2017
! Stan Solomon and Ben Foster, 1/2015
! Stan Solomon, 12/2015, 1/2016
! Stan Solomon, 3/2016, MPI parallel version
! Main multi-processor driver for the GLOW model.
! Uses TIE-GCM history files or MSIS/IRI for input.
! Requires MPI and netCDF libraries
! For definitions of use-associated variables, see subroutine GLOW and module CGLOW.
! For definitions of TGCM input variables see module READTGCM
! For definitions of output arrays see module OUTPUT
! Other definitions:
! f107p Solar 10.7 cm flux for previous day
! ap Ap index of geomagnetic activity
! z altitude array, km
! Array dimensions:
! jmax number of altitude levels
! nbins number of energetic electron energy bins
! lmax number of wavelength intervals for solar flux
! nmaj number of major species
! nst number of states produced by photoionization/dissociation
! nei number of states produced by electron impact
! nex number of ionized/excited species
! nw number of airglow emission wavelengths
! nc number of component production terms for each emission
use mpi
use cglow,only: cglow_init ! subroutine to allocate use-associated variables
use cglow,only: jmax,nbins,lmax,nmaj,nei,nex,nw,nc,nst
use cglow,only: idate,ut,glat,glong,f107a,f107,f107p,ap,ef,ec
use cglow,only: iscale,jlocal,kchem,xuvfac
use cglow,only: sza,dip,efrac,ierr
use cglow,only: zz,zo,zn2,zo2,zns,znd,zno,ztn,ze,zti,zte
use cglow,only: ener,del,phitop,wave1,wave2,sflux,pespec,sespec,uflx,dflx,sion
use cglow,only: photoi,photod,phono,aglw,ecalc,zxden,zeta,zceta,zlbh
use cglow,only: data_dir
use readtgcm,only: read_tgcm ! subroutine to read tgcm history file
use readtgcm,only: read_tgcm_coords ! subroutine to read tgcm history coordinates
use readtgcm,only: find_mtimes ! subroutine to find model times on history file
use readtgcm,only: nlon_tgcm=>nlon, glon_tgcm=>glon
use readtgcm,only: nlat_tgcm=>nlat, glat_tgcm=>glat
use readtgcm,only: nlev_tgcm=>nlev
use readtgcm,only: iyear_tgcm=>iyear,iday_tgcm=>iday,ut_tgcm=>ut
use readtgcm,only: f107_tgcm=>f107d,f107a_tgcm=>f107a,hpower
use readtgcm,only: alfacusp=>alfac,ecusp=>ec,alfadrizzle=>alfad,edrizzle=>ed
use readtgcm,only: eflux,nflux,alfa,drizzle,cusp
use readtgcm,only: zg,tn,un,vn,o2,o1,n2,n4s,n2d,no,ti,te,ne
use readtgcm,only: mxtimes
use output,only: output_init ! subroutine to allocate output arrays
use output,only: create_ncfile ! subroutine to create netcdf output file
use output,only: write_ncfile ! subroutine to write data to netcdf output file
use output,only: nlon,nlat,nlev ! grid dimensions (set here in glow_drv)
use output,only: lon,lat,lev ! grid coordinates (set here in glow_drv)
use output,only: writelbh,writered ! switches
use output,only: zzz,ao,ao2,an2,ano,an4s,an2d,atn,ati,ate,ane,aun,avn, &
nec,ped,hall,xden,eta,lbh,redline ! output arrays
implicit none
character(len=1024) :: &
tgcm_ncfile, & ! path to tgcm history file (tiegcm or timegcm)
glow_ncfile, & ! root name for netCDF glow output files
glow_ncfileit, & ! path to individual netCDF glow output file (integer appended)
iri90_dir ! directory containing iri data files
character(len=7) :: ifile
real,allocatable :: z(:) ! glow height coordinate in km (jmax)
real,allocatable :: zun(:),zvn(:) ! winds on glow grid
real,allocatable :: pedcond(:), hallcond(:) ! Pedersen and Hall conductivities in S/m (mho)
real,allocatable :: outf(:,:) ! iri output (11,jmax)
real,allocatable :: recbuf(:,:) ! receive buffer for MPI gather
real :: utstart,utstep,utstop
real :: rz12,stl,fmono,emono,kp
real :: d(8), t(2), sw(25), oarr(30)
integer nproc,itask,mpierr,lat0,lat1,size2d,size3d,sizeb
integer :: l,j,jj,ijf,jmag,iday,mmdd,i,ii,n,k,ix,itail,m
integer :: nlat_msis, nlon_msis ! number of lats and lons in grid for MSIS/IRI runs
integer :: start_mtime(3) ! model start time (day,hour,minute)
integer :: stop_mtime(3) ! model stop time (day,hour,minute)
integer :: indate,itime,ntimes,mtimes(3,mxtimes),itimes(mxtimes)
logical :: tgcm, first, jf(12)
data sw/25*1./, first/.true./
namelist /glow_input/ &
indate,utstart,utstep,utstop,nlat_msis,nlon_msis,f107a,f107,f107p,ap, &
iscale,jlocal,kchem,xuvfac,ef,ec,itail,fmono,emono, &
tgcm_ncfile,iri90_dir,jmax,glow_ncfile, &
start_mtime,stop_mtime,data_dir,writelbh,writered
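!
! An illustrative &glow_input namelist for an MSIS/IRI run (a sketch only;
! all values and paths below are hypothetical examples, not recommendations;
! leaving tgcm_ncfile unset selects the MSIS/IRI/NOEM branch):
!
! &glow_input
!  indate=2014075, utstart=0., utstep=3600., utstop=82800.,
!  nlat_msis=36, nlon_msis=72,
!  f107=120., f107a=120., f107p=120., ap=10.,
!  ef=0., ec=0., itail=0, fmono=0., emono=0.,
!  iscale=1, jlocal=0, kchem=4, xuvfac=3.,
!  data_dir='data/', iri90_dir='data/iri90/',
!  glow_ncfile='glow.out'
! /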
! Execute:
!
! Set up MPI:
!
call mpi_init(mpierr)
call mpi_comm_rank(MPI_COMM_WORLD,itask,mpierr)
call mpi_comm_size(MPI_COMM_WORLD,nproc,mpierr)
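!
! The driver reads its namelist from standard input (unit 5), so it is normally
! launched under MPI with the input file redirected, e.g. (illustrative command;
! executable and file names are hypothetical):
!
! mpirun -np 4 ./glow.exe < glow.inp
!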
!
! Initialize tgcm_ncfile and start/stop times:
! If start_mtime and/or stop_mtime are not read from namelist, model days of -999
! will flag find_mtimes to find the first and/or last histories on the file.
!
tgcm_ncfile = ' '
start_mtime = (/-999,0,0/)
stop_mtime = (/-999,0,0/)
!
! Root task only: Read namelist inputs from input file.
! Read times, coordinates, 1D vars, from tgcm history file (tiegcm or timegcm), if provided.
! If tgcm history file is not provided, will use namelist inputs for MSIS/IRI/NOEM:
!
if (itask == 0) then
read (5,nml=glow_input)
if (len_trim(tgcm_ncfile) > 0) then
tgcm = .true.
call read_tgcm_coords(tgcm_ncfile)
ntimes = find_mtimes(tgcm_ncfile,start_mtime,stop_mtime,mtimes,itimes)
else
tgcm = .false.
ntimes = int((utstop-utstart)/utstep) + 1
write(6,"('glow_drv: tgcm_ncfile not provided, will use MSIS/IRI')")
endif
endif
!
! Broadcast namelist inputs, tgcm coordinates, times, etc., to all processors:
!
call mpi_bcast(tgcm,1,MPI_LOGICAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(data_dir,1024,MPI_CHARACTER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(iscale,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(jlocal,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(kchem,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(xuvfac,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr) ! xuvfac is real
call mpi_bcast(itail,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ntimes,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
if (tgcm) then
call mpi_bcast(itimes,mxtimes,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(nlon_tgcm,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(nlat_tgcm,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
if (itask /= 0) then
allocate(glon_tgcm(nlon_tgcm))
allocate(glat_tgcm(nlat_tgcm))
allocate(eflux(nlon_tgcm,nlat_tgcm))
allocate(alfa(nlon_tgcm,nlat_tgcm))
endif
call mpi_bcast(glon_tgcm,nlon_tgcm,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(glat_tgcm,nlat_tgcm,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
else
call mpi_bcast(nlon_msis,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(nlat_msis,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ef,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ec,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
endif
!
! fmono and emono are needed by maxt on all tasks in both tgcm and namelist runs,
! so broadcast them unconditionally:
!
call mpi_bcast(fmono,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(emono,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
!
! Set grid dimensions:
!
if (tgcm) then
jmax=68
nlev = jmax
nlat = nlat_tgcm
nlon = nlon_tgcm
else
jmax = 102
nlev = jmax
nlat = nlat_msis
nlon = nlon_msis
endif
!
! Allocate arrays in other modules (formerly in common blocks):
!
call cglow_init
call output_init
!
! Set up grid:
!
if (tgcm) then
lon(:) = glon_tgcm(:)
lat(:) = glat_tgcm(:)
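!
! TGCM-style log-pressure levels at quarter-scale-height spacing:
! lev = -10.0, -9.75, ..., +6.75 for jmax=68.
!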
do k=1,jmax
lev(k)=real(k)/4.-10.25
enddo
else
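!
! Uniform geographic grid for MSIS/IRI runs: longitudes start at -180 with
! spacing 360/nlon_msis degrees; latitudes are cell centers, offset half a
! cell (90/nlat_msis degrees) from each pole:
!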
do i=1,nlon
lon(i) = real(i-1)/real(nlon_msis)*360. - 180.
enddo
do j=1,nlat
lat(j) = real(j-1)/real(nlat_msis)*180. - 90. + 90./real(nlat_msis)
enddo
lev(:)=0.
endif
!
! Assign latitude bands to processors:
!
if (itask == 0 .and. (nlat/nproc)*nproc /= nlat) then
write(6,"('glow_drv: number of latitudes must be an integer multiple of number of processors')")
write(6,"('NLAT =',i3,' NPROC =',i3)") nlat,nproc
call mpi_abort(MPI_COMM_WORLD,1,mpierr) ! abort all tasks; a plain stop would halt only the root task and hang the rest
endif
size2d=nlon*nlat
size3d=nlon*nlat*nlev
sizeb=nlon*nlat/nproc
lat0 = 1 + itask * nlat/nproc
lat1 = (1+itask) * nlat/nproc
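!
! Example: with nlat=36 and nproc=4, each task computes 9 latitudes;
! task 0 gets l=1:9, task 1 gets l=10:18, etc. (lat0 and lat1 are inclusive bounds).
!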
!
! Allocate local arrays:
!
allocate(z(jmax))
allocate(zun(jmax))
allocate(zvn(jmax))
allocate(pedcond(jmax))
allocate(hallcond(jmax))
allocate(outf(11,jmax))
allocate(recbuf(nlon,nlat))
!
! Set electron energy grid:
!
call egrid (ener, del, nbins)
!
! Time loop:
!
do itime=1,ntimes
!
! Get input fields if this is a tgcm run, otherwise use namelist inputs to MSIS/IRI/NOEM.
! Only do this section if this is the root task:
!
if (itask == 0) then
if (tgcm) then
call read_tgcm(tgcm_ncfile,itimes(itime))
idate=iyear_tgcm*1000+iday_tgcm
ut =ut_tgcm*3600.
f107 =f107_tgcm(itime)
f107p=f107_tgcm(itime)
f107a=f107a_tgcm(itime)
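!
! Estimate Kp by inverting the TGCM hemispheric-power relation
! hpower = 16.82*exp(0.32*kp) - 4.86 (GW), then estimate Ap from Kp with an
! approximate exponential fit to the standard Kp-to-ap conversion table:
!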
kp=log((hpower(itime)+4.86)/16.82)/.32
ap=2.2*exp(.6*kp)
else
idate=indate
ut =utstart+(itime-1)*utstep
endif
write(6,"('glow_drv: idate=',i7,' ut=',f7.1)") idate,ut
write(6,"('glow_drv: F107=',f5.1,' F107a=',f5.1,' F107p=',f5.1,' Ap=',f5.1)")f107,f107a,f107p,ap
!
! Loop over latitude and longitude to interpolate input fields:
!
do l=1,nlat
glat = lat(l)
do i=1,nlon
glong = lon(i)
!
! If this is a tgcm run, use altitude grid, fields, and auroral inputs from tgcm history.
! Otherwise, use default altitude grid, MSIS/NOEM/IRI fields, and namelist auroral inputs:
!
if (tgcm) then
call tzgrid(i,l,jmax,z,zo,zo2,zn2,zns,znd,zno,ztn,zun,zvn,ze,zti,zte)
else
stl = ut/3600. + glong/15.
if (stl < 0.) stl = stl + 24.
if (stl >= 24.) stl = stl - 24.
call mzgrid (jmax,nex,idate,ut,glat,glong,stl,f107a,f107,f107p,ap,iri90_dir, &
z,zo,zo2,zn2,zns,znd,zno,ztn,zun,zvn,ze,zti,zte,zxden)
endif
!
! Fill global arrays:
!
zzz(i,l,:) = z(:) * 1.e5 !km to cm at all altitude levels
ao(i,l,:) = zo(:)
ao2(i,l,:) = zo2(:)
an2(i,l,:) = zn2(:)
ano(i,l,:) = zno(:)
an4s(i,l,:) = zns(:)
an2d(i,l,:) = znd(:)
atn(i,l,:) = ztn(:)
ati(i,l,:) = zti(:)
ate(i,l,:) = zte(:)
aun(i,l,:) = zun(:)
avn(i,l,:) = zvn(:)
ane(i,l,:) = ze(:)
enddo ! longitude loop
enddo ! latitude loop
endif ! bottom of root-task-only conditional for input/interpolation section
!
! Broadcast idate, ut, f107, f107a, 3D fields, (and 2D auroral fields), to all processors:
!
call mpi_bcast(idate,1,MPI_INTEGER,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ut,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(f107,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(f107a,1,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(zzz,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ao,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ao2,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(an2,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ano,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(an4s,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(an2d,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(atn,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ati,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ate,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(aun,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(avn,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(ane,size3d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (tgcm) then
call mpi_bcast(eflux,size2d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
call mpi_bcast(alfa,size2d,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
endif
!
! Loop over all subdomain latitudes and longitudes for calls to GLOW:
!
do l=lat0,lat1
glat = lat(l)
do i=1,nlon
glong = lon(i)
phitop(:) = 0.
!
! Get auroral inputs from TGCM history or from namelist inputs:
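! (maxt fills phitop with a Maxwellian electron spectrum of total energy flux ef
! and characteristic energy ec, with an optional low-energy tail and/or
! monoenergetic beam controlled by itail, fmono, and emono)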
!
if (tgcm) then
ef=eflux(i,l)
ec=alfa(i,l)*1000. ! keV to eV
endif
if (ef>.001 .and. ec>1.) call maxt (ef,ec,ener,del,nbins,itail,fmono,emono,phitop)
!
! Transfer global fields back to altitude arrays at specific lat/lon:
!
zz(:) = zzz(i,l,:)
z(:) = zz(:) / 1.e5 ! cm back to km for conduct below (z is otherwise unset off the root task, and stale on it)
zo(:) = ao(i,l,:)
zo2(:) = ao2(i,l,:)
zn2(:) = an2(i,l,:)
zno(:) = ano(i,l,:)
zns(:) = an4s(i,l,:)
znd(:) = an2d(i,l,:)
ztn(:) = atn(i,l,:)
zti(:) = ati(i,l,:)
zte(:) = ate(i,l,:)
zun(:) = aun(i,l,:)
zvn(:) = avn(i,l,:)
ze(:) = ane(i,l,:)
!
! Call GLOW to calculate ionized and excited species, and airglow emission rates:
!
call glow
!
! Write error code and energy conservation to standard output if it is out of range:
!
if (ierr > 0) write(6,"('glow_drv: IERR = ',i1)") ierr
if (abs(efrac) > 0.2) &
write(6,"('glow_drv: EFRAC =',f5.2,' SZA =',f5.2,' DIP=',f5.2)") efrac,sza,dip
!
! Call CONDUCT to calculate Pedersen and Hall conductivities:
!
do j=1,jmax
call conduct (glat, glong, z(j), zo(j), zo2(j), zn2(j), &
zxden(3,j), zxden(6,j), zxden(7,j), ztn(j), zti(j), zte(j), &
pedcond(j), hallcond(j))
enddo
!
! Collect arrays for output:
!
do j=1,jmax
nec(i,l,j) = ecalc(j)
ped(i,l,j) = pedcond(j)
hall(i,l,j) = hallcond(j)
do ix=1,nex
xden(i,l,j,ix) = zxden(ix,j)
enddo
do ix=1,nw
eta(i,l,j,ix) = zeta(ix,j)
enddo
do ix=1,nc
lbh(i,l,j,ix) = zlbh(ix,j)
redline(i,l,j,ix)=zceta(ix,5,j)
enddo
enddo
enddo ! longitude loop
enddo ! latitude loop
!
! Gather fields to root task for output:
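! Each task contributes its contiguous (nlon x nlat/nproc) latitude band; because
! longitude is the fastest-varying dimension, mpi_gather reassembles the full
! (nlon x nlat) level in rank order on the root task: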
!
do j=1,jmax
call mpi_gather &
(nec(:,lat0:lat1,j),sizeb,MPI_REAL,recbuf,sizeb,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (itask == 0) nec(:,:,j)=recbuf(:,:)
call mpi_gather &
(ped(:,lat0:lat1,j),sizeb,MPI_REAL,recbuf,sizeb,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (itask == 0) ped(:,:,j)=recbuf(:,:)
call mpi_gather &
(hall(:,lat0:lat1,j),sizeb,MPI_REAL,recbuf,sizeb,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (itask == 0) hall(:,:,j)=recbuf(:,:)
do ix=1,nex
call mpi_gather &
(xden(:,lat0:lat1,j,ix),sizeb,MPI_REAL,recbuf,sizeb,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (itask == 0) xden(:,:,j,ix) = recbuf(:,:)
enddo
do ix=1,nw
call mpi_gather &
(eta(:,lat0:lat1,j,ix),sizeb,MPI_REAL,recbuf,sizeb,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (itask == 0) eta(:,:,j,ix) = recbuf(:,:)
enddo
do ix=1,nc
call mpi_gather &
(lbh(:,lat0:lat1,j,ix),sizeb,MPI_REAL,recbuf,sizeb,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (itask == 0) lbh(:,:,j,ix) = recbuf(:,:)
call mpi_gather &
(redline(:,lat0:lat1,j,ix),sizeb,MPI_REAL,recbuf,sizeb,MPI_REAL,0,MPI_COMM_WORLD,mpierr)
if (itask == 0) redline(:,:,j,ix) = recbuf(:,:)
enddo
enddo
!
! Output section:
! Create and define a new netCDF output file for each time (root task only):
!
if (itask == 0) then
write (ifile,"('.',i3.3,'.nc')") itime
glow_ncfileit = trim(glow_ncfile) // ifile
call create_ncfile(glow_ncfileit,tgcm_ncfile)
call write_ncfile(glow_ncfileit)
endif
enddo ! bottom of time loop
call mpi_finalize(mpierr)
end program glowdriver