LMDZ
surf_land_orchidee_noopenmp_mod.F90
!
! $Id$
!
MODULE surf_land_orchidee_noopenmp_mod
!
! This module is compiled only if the CPP key ORCHIDEE_NOOPENMP is defined.
! This module should be used with the sequential or parallel-MPI version of ORCHIDEE (not the mixed MPI-OpenMP version).

#ifdef ORCHIDEE_NOOPENMP
!
! This module controls the interface towards the ORCHIDEE model.
!
! Subroutines in this module : surf_land_orchidee
!                              Init_orchidee_index
!                              Get_orchidee_communicator
!                              Init_neighbours
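!
! CPP keys used in this file (summary of the conditional blocks below):
!   ORCHIDEE_NOOPENMP : enables compilation of this module at all
!   CPP_VEGET         : activates the coupling with ORCHIDEE
!   CPP_MPI           : ORCHIDEE compiled with MPI (parallel interface)
!   ORCH_NEW          : newer ORCHIDEE with the carbon-cycle coupling fields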
  USE dimphy
#ifdef CPP_VEGET
  USE intersurf     ! ORCHIDEE module
#endif
  USE cpl_mod,      ONLY : cpl_send_land_fields
  USE surface_data, ONLY : type_ocean
  USE geometry_mod, ONLY : dx, dy
  USE mod_grid_phy_lmdz
  USE mod_phys_lmdz_para

  IMPLICIT NONE

  PRIVATE
  PUBLIC :: surf_land_orchidee

CONTAINS
!
!****************************************************************************************
!
  SUBROUTINE surf_land_orchidee(itime, dtime, date0, knon, &
       knindex, rlon, rlat, pctsrf, &
       debut, lafin, &
       plev, u1_lay, v1_lay, temp_air, spechum, epot_air, ccanopy, &
       tq_cdrag, petAcoef, peqAcoef, petBcoef, peqBcoef, &
       precip_rain, precip_snow, lwdown, swnet, swdown, &
       ps, q2m, t2m, &
       evap, fluxsens, fluxlat, &
       tsol_rad, tsurf_new, alb1_new, alb2_new, &
       emis_new, z0_new, qsurf)
!
! This routine is the interface between the atmospheric model and the
! continental land-surface model. It calls SECHIBA.
!
! L. Fairhead 02/2000
!
! input:
!   itime        time-step number
!   dtime        physics time step (s)
!   nisurf       index of the surface type to treat (1 = continental land)
!   knon         number of points of the surface to treat
!   knindex      indices of the points of the surface to treat
!   rlon         longitudes of the full grid
!   rlat         latitudes of the full grid
!   pctsrf       array of the surface fractions of each grid cell
!   debut        logical: first call to the physics (read the restart files)
!   lafin        logical: last call to the physics (write the restart files)
!                (if false, simplified computation of the fluxes over the continents)
!   plev         height of the first layer (Pa)
!   u1_lay       u wind speed in the first layer
!   v1_lay       v wind speed in the first layer
!   temp_air     air temperature in the first layer
!   spechum      specific humidity in the first layer
!   epot_air     potential temperature of the air
!   ccanopy      CO2 concentration in the canopy; corresponds to co2_send from
!                carbon_cycle_mod, or to the constant value co2_ppm
!   tq_cdrag     drag coefficient
!   petAcoef     A coefficient of the boundary-layer resolution for t
!   peqAcoef     A coefficient of the boundary-layer resolution for q
!   petBcoef     B coefficient of the boundary-layer resolution for t
!   peqBcoef     B coefficient of the boundary-layer resolution for q
!   precip_rain  liquid precipitation
!   precip_snow  solid precipitation
!   lwdown       downward IR flux at the surface
!   swnet        net solar flux
!   swdown       incoming solar flux at the surface
!   ps           surface pressure
!   radsol       net radiation at the ground (LW + SW)
!
!
! output:
!   evap         total evaporation
!   fluxsens     sensible heat flux
!   fluxlat      latent heat flux
!   tsol_rad
!   tsurf_new    ground temperature
!   alb1_new     albedo in the visible SW interval
!   alb2_new     albedo in the near-IR interval
!   emis_new     emissivity
!   z0_new       surface roughness
!   qsurf        air moisture at the surface
!
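!
! Illustrative only (the actual call site is elsewhere in the physics): given
! the argument list documented above, a call would look roughly like
!
!   CALL surf_land_orchidee(itime, dtime, date0, knon, &
!        knindex, rlon, rlat, pctsrf, &
!        debut, lafin, &
!        plev, u1_lay, v1_lay, temp_air, spechum, epot_air, ccanopy, &
!        tq_cdrag, petAcoef, peqAcoef, petBcoef, peqBcoef, &
!        precip_rain, precip_snow, lwdown, swnet, swdown, &
!        ps, q2m, t2m, &
!        evap, fluxsens, fluxlat, &
!        tsol_rad, tsurf_new, alb1_new, alb2_new, &
!        emis_new, z0_new, qsurf)
!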
    USE carbon_cycle_mod, ONLY : carbon_cycle_cpl, fco2_land_inst, fco2_lu_inst
    USE indice_sol_mod
    USE mod_grid_phy_lmdz, ONLY: nbp_lon, nbp_lat
    USE print_control_mod, ONLY: lunout
#ifdef CPP_VEGET
    USE time_phylmdz_mod, ONLY: itau_phy
#endif
    IMPLICIT NONE

    include "YOMCST.h"

!
! Input arguments
!****************************************************************************************
    INTEGER, INTENT(IN)                       :: itime
    REAL, INTENT(IN)                          :: dtime
    REAL, INTENT(IN)                          :: date0
    INTEGER, INTENT(IN)                       :: knon
    INTEGER, DIMENSION(klon), INTENT(IN)      :: knindex
    LOGICAL, INTENT(IN)                       :: debut, lafin
    REAL, DIMENSION(klon,nbsrf), INTENT(IN)   :: pctsrf
    REAL, DIMENSION(klon), INTENT(IN)         :: rlon, rlat
    REAL, DIMENSION(klon), INTENT(IN)         :: plev
    REAL, DIMENSION(klon), INTENT(IN)         :: u1_lay, v1_lay
    REAL, DIMENSION(klon), INTENT(IN)         :: temp_air, spechum
    REAL, DIMENSION(klon), INTENT(IN)         :: epot_air, ccanopy
    REAL, DIMENSION(klon), INTENT(IN)         :: tq_cdrag
    REAL, DIMENSION(klon), INTENT(IN)         :: petAcoef, peqAcoef
    REAL, DIMENSION(klon), INTENT(IN)         :: petBcoef, peqBcoef
    REAL, DIMENSION(klon), INTENT(IN)         :: precip_rain, precip_snow
    REAL, DIMENSION(klon), INTENT(IN)         :: lwdown, swnet, swdown, ps
    REAL, DIMENSION(klon), INTENT(IN)         :: q2m, t2m

! Output arguments
!****************************************************************************************
    REAL, DIMENSION(klon), INTENT(OUT)        :: evap, fluxsens, fluxlat, qsurf
    REAL, DIMENSION(klon), INTENT(OUT)        :: tsol_rad, tsurf_new
    REAL, DIMENSION(klon), INTENT(OUT)        :: alb1_new, alb2_new
    REAL, DIMENSION(klon), INTENT(OUT)        :: emis_new, z0_new

! Local variables
!****************************************************************************************
    INTEGER                                   :: ij, jj, igrid, ireal, index
    INTEGER                                   :: error
    INTEGER, SAVE                             :: nb_fields_cpl ! number of fields for the climate-carbon coupling (between ATM and ORCHIDEE)
    REAL, SAVE, ALLOCATABLE, DIMENSION(:,:)   :: fields_cpl    ! fluxes for the climate-carbon coupling
    REAL, DIMENSION(klon)                     :: swdown_vrai
    CHARACTER (len = 20)                      :: modname = 'surf_land_orchidee'
    CHARACTER (len = 80)                      :: abort_message
    LOGICAL, SAVE                             :: check = .FALSE.
    !$OMP THREADPRIVATE(check)

! type of coupling in SECHIBA
!   character (len=10) :: coupling = 'implicit'
! flags controlling the calls inside SECHIBA
!   type(control_type), save :: control_in
! preserved albedo
    REAL, ALLOCATABLE, DIMENSION(:), SAVE     :: albedo_keep, zlev
    !$OMP THREADPRIVATE(albedo_keep,zlev)
! geographic coordinates
    REAL, ALLOCATABLE, DIMENSION(:,:), SAVE   :: lalo
    !$OMP THREADPRIVATE(lalo)
! neighbouring points
    INTEGER, ALLOCATABLE, DIMENSION(:,:), SAVE :: neighbours
    !$OMP THREADPRIVATE(neighbours)
! continental fractions
    REAL, ALLOCATABLE, DIMENSION(:), SAVE     :: contfrac
    !$OMP THREADPRIVATE(contfrac)
! grid resolution
    REAL, ALLOCATABLE, DIMENSION (:,:), SAVE  :: resolution
    !$OMP THREADPRIVATE(resolution)

    REAL, ALLOCATABLE, DIMENSION (:,:), SAVE  :: lon_scat, lat_scat
    !$OMP THREADPRIVATE(lon_scat,lat_scat)

    LOGICAL, SAVE                             :: lrestart_read = .TRUE.
    !$OMP THREADPRIVATE(lrestart_read)
    LOGICAL, SAVE                             :: lrestart_write = .FALSE.
    !$OMP THREADPRIVATE(lrestart_write)

    REAL, DIMENSION(knon,2)                   :: albedo_out
    !$OMP THREADPRIVATE(albedo_out)

! naming mismatch with ORCHIDEE
    REAL, DIMENSION(klon)                     :: peta_orc, peqa_orc
    REAL, DIMENSION(klon)                     :: petb_orc, peqb_orc
! grid-correspondence problem
    INTEGER, DIMENSION(:), SAVE, ALLOCATABLE  :: ig, jg
    !$OMP THREADPRIVATE(ig,jg)
    INTEGER                                   :: indi, indj
    INTEGER, SAVE, ALLOCATABLE, DIMENSION(:)  :: ktindex
    !$OMP THREADPRIVATE(ktindex)

! cdrag test
    REAL, DIMENSION(klon)                     :: cdrag
    INTEGER, SAVE                             :: offset
    !$OMP THREADPRIVATE(offset)

    REAL, DIMENSION(klon_glo)                 :: rlon_g, rlat_g
    INTEGER, SAVE                             :: orch_comm
    !$OMP THREADPRIVATE(orch_comm)

    REAL, ALLOCATABLE, DIMENSION(:), SAVE     :: coastalflow
    !$OMP THREADPRIVATE(coastalflow)
    REAL, ALLOCATABLE, DIMENSION(:), SAVE     :: riverflow
    !$OMP THREADPRIVATE(riverflow)
!
! End of definitions
!****************************************************************************************
#ifdef CPP_VEGET

    IF (check) WRITE(lunout,*) 'Entering ', modname

! Initialization

    IF (debut) THEN
! Consistency test
#ifndef ORCH_NEW
       ! Compilation with the new ORCHIDEE version is required when carbon_cycle_cpl=y
       IF (carbon_cycle_cpl) THEN
          abort_message='You must define the preprocessing key ORCH_NEW when running with carbon_cycle_cpl=y'
          CALL abort_physic(modname,abort_message,1)
       END IF
#endif
       ALLOCATE(ktindex(knon))
       IF ( .NOT. ALLOCATED(albedo_keep)) THEN
          ALLOCATE(albedo_keep(klon))
          ALLOCATE(zlev(knon))
       ENDIF
! Grid correspondence: recover the (i,j) indices on the 2D grid from the 1D physics grid
       ALLOCATE(ig(klon))
       ALLOCATE(jg(klon))
       ig(1) = 1
       jg(1) = 1
       indi = 0
       indj = 2
       DO igrid = 2, klon - 1
          indi = indi + 1
          IF ( indi > nbp_lon) THEN
             indi = 1
             indj = indj + 1
          ENDIF
          ig(igrid) = indi
          jg(igrid) = indj
       ENDDO
       ig(klon) = 1
       jg(klon) = nbp_lat
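! The loop above assumes the standard LMDZ 1D physics grid: point 1 is the
! North Pole, point klon the South Pole, and the points in between run
! west-to-east along successive latitude rows of length nbp_lon.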

       IF ((.NOT. ALLOCATED(lalo))) THEN
          ALLOCATE(lalo(knon,2), stat = error)
          IF (error /= 0) THEN
             abort_message='Error in allocation of lalo'
             CALL abort_physic(modname,abort_message,1)
          ENDIF
       ENDIF
       IF ((.NOT. ALLOCATED(lon_scat))) THEN
          ALLOCATE(lon_scat(nbp_lon,nbp_lat), stat = error)
          IF (error /= 0) THEN
             abort_message='Error in allocation of lon_scat'
             CALL abort_physic(modname,abort_message,1)
          ENDIF
       ENDIF
       IF ((.NOT. ALLOCATED(lat_scat))) THEN
          ALLOCATE(lat_scat(nbp_lon,nbp_lat), stat = error)
          IF (error /= 0) THEN
             abort_message='Error in allocation of lat_scat'
             CALL abort_physic(modname,abort_message,1)
          ENDIF
       ENDIF
       lon_scat = 0.
       lat_scat = 0.
       DO igrid = 1, knon
          index = knindex(igrid)
          lalo(igrid,2) = rlon(index)
          lalo(igrid,1) = rlat(index)
       ENDDO

       CALL gather(rlon,rlon_g)
       CALL gather(rlat,rlat_g)

       IF (is_mpi_root) THEN
          index = 1
          DO jj = 2, nbp_lat-1
             DO ij = 1, nbp_lon
                index = index + 1
                lon_scat(ij,jj) = rlon_g(index)
                lat_scat(ij,jj) = rlat_g(index)
             ENDDO
          ENDDO
          lon_scat(:,1) = lon_scat(:,2)
          lat_scat(:,1) = rlat_g(1)
          lon_scat(:,nbp_lat) = lon_scat(:,2)
          lat_scat(:,nbp_lat) = rlat_g(klon_glo)
       ENDIF

       CALL bcast(lon_scat)
       CALL bcast(lat_scat)

!
! Allocate and initialize the array of neighbours and the continental fractions
!
       IF ( (.NOT.ALLOCATED(neighbours))) THEN
          ALLOCATE(neighbours(knon,8), stat = error)
          IF (error /= 0) THEN
             abort_message='Error in allocation of neighbours'
             CALL abort_physic(modname,abort_message,1)
          ENDIF
       ENDIF
       neighbours = -1
       IF (( .NOT. ALLOCATED(contfrac))) THEN
          ALLOCATE(contfrac(knon), stat = error)
          IF (error /= 0) THEN
             abort_message='Error in allocation of contfrac'
             CALL abort_physic(modname,abort_message,1)
          ENDIF
       ENDIF

       DO igrid = 1, knon
          ireal = knindex(igrid)
          contfrac(igrid) = pctsrf(ireal,is_ter)
       ENDDO


       CALL init_neighbours(knon,neighbours,knindex,pctsrf(:,is_ter))

!
! Allocate and compute the grid resolution
       IF ( (.NOT.ALLOCATED(resolution))) THEN
          ALLOCATE(resolution(knon,2), stat = error)
          IF (error /= 0) THEN
             abort_message='Error in allocation of resolution'
             CALL abort_physic(modname,abort_message,1)
          ENDIF
       ENDIF
       DO igrid = 1, knon
          ij = knindex(igrid)
          resolution(igrid,1) = dx(ij)
          resolution(igrid,2) = dy(ij)
       ENDDO

       ALLOCATE(coastalflow(klon), stat = error)
       IF (error /= 0) THEN
          abort_message='Error in allocation of coastalflow'
          CALL abort_physic(modname,abort_message,1)
       ENDIF

       ALLOCATE(riverflow(klon), stat = error)
       IF (error /= 0) THEN
          abort_message='Error in allocation of riverflow'
          CALL abort_physic(modname,abort_message,1)
       ENDIF

!
! Allocate variables needed for carbon_cycle_mod
       IF ( carbon_cycle_cpl ) THEN
          nb_fields_cpl=2
       ELSE
          nb_fields_cpl=1
       END IF


       IF (carbon_cycle_cpl) THEN
          ALLOCATE(fco2_land_inst(klon),stat=error)
          IF (error /= 0) CALL abort_physic(modname,'Error in allocation of fco2_land_inst',1)

          ALLOCATE(fco2_lu_inst(klon),stat=error)
          IF (error /= 0) CALL abort_physic(modname,'Error in allocation of fco2_lu_inst',1)
       END IF

       ALLOCATE(fields_cpl(klon,nb_fields_cpl), stat = error)
       IF (error /= 0) CALL abort_physic(modname,'Error in allocation of fields_cpl',1)

    ENDIF ! (end of debut)

!
! Call the continental-surface routine
!
    IF (lafin) lrestart_write = .TRUE.
    IF (check) WRITE(lunout,*) 'lafin ', lafin, lrestart_write

    peta_orc(1:knon) = petBcoef(1:knon) * dtime
    petb_orc(1:knon) = petAcoef(1:knon)
    peqa_orc(1:knon) = peqBcoef(1:knon) * dtime
    peqb_orc(1:knon) = peqAcoef(1:knon)

    cdrag = 0.
    cdrag(1:knon) = tq_cdrag(1:knon)

!   zlev(1:knon) = (100.*plev(1:knon))/((ps(1:knon)/287.05*temp_air(1:knon))*9.80665)
    zlev(1:knon) = (100.*plev(1:knon))/((ps(1:knon)/rd*temp_air(1:knon))*rg)
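! The line above estimates the first-layer height from the hydrostatic
! relation dz = dp/(rho*g), with rho = ps/(Rd*T) from the ideal-gas law; rd
! and rg (gas constant and gravity) come from YOMCST.h, and the factor 100
! suggests that plev is expected here in hPa.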


! PF and PASB
!   where(cdrag > 0.01)
!     cdrag = 0.01
!   endwhere
!   write(*,*) 'Cdrag = ', minval(cdrag), maxval(cdrag)

!
! Initialize ORCHIDEE
!
!   if (pole_nord) then
!     offset=0
!     ktindex(:)=ktindex(:)+nbp_lon-1
!   else
!     offset = klon_mpi_begin-1+nbp_lon-1
!     ktindex(:)=ktindex(:)+MOD(offset,nbp_lon)
!     offset=offset-MOD(offset,nbp_lon)
!   endif

    IF (debut) THEN
       CALL get_orchidee_communicator(knon,orch_comm)
       IF (knon /= 0) THEN
          CALL init_orchidee_index(knon,orch_comm,knindex,offset,ktindex)

#ifndef CPP_MPI
          ! Interface for ORCHIDEE compiled in sequential mode (without the preprocessing flag CPP_MPI)
          CALL intersurf_main (itime+itau_phy-1, nbp_lon, nbp_lat, knon, ktindex, dtime, &
               lrestart_read, lrestart_write, lalo, &
               contfrac, neighbours, resolution, date0, &
               zlev, u1_lay, v1_lay, spechum, temp_air, epot_air, ccanopy, &
               cdrag, peta_orc, peqa_orc, petb_orc, peqb_orc, &
               precip_rain, precip_snow, lwdown, swnet, swdown, ps, &
               evap, fluxsens, fluxlat, coastalflow, riverflow, &
               tsol_rad, tsurf_new, qsurf, albedo_out, emis_new, z0_new, &
               lon_scat, lat_scat, q2m, t2m &
#ifdef ORCH_NEW
               , nb_fields_cpl, fields_cpl)
#else
               )
#endif

#else
          ! Interface for ORCHIDEE version 1.9 or later (1.9.2, 1.9.3, 1.9.4, 1.9.5) compiled in parallel mode (with the preprocessing flag CPP_MPI)
          CALL intersurf_main (itime+itau_phy-1, nbp_lon, nbp_lat, offset, knon, ktindex, &
               orch_comm, dtime, lrestart_read, lrestart_write, lalo, &
               contfrac, neighbours, resolution, date0, &
               zlev, u1_lay(1:knon), v1_lay(1:knon), spechum(1:knon), temp_air(1:knon), epot_air(1:knon), ccanopy(1:knon), &
               cdrag(1:knon), peta_orc(1:knon), peqa_orc(1:knon), petb_orc(1:knon), peqb_orc(1:knon), &
               precip_rain(1:knon), precip_snow(1:knon), lwdown(1:knon), swnet(1:knon), swdown(1:knon), ps(1:knon), &
               evap(1:knon), fluxsens(1:knon), fluxlat(1:knon), coastalflow(1:knon), riverflow(1:knon), &
               tsol_rad(1:knon), tsurf_new(1:knon), qsurf(1:knon), albedo_out(1:knon,:), emis_new(1:knon), z0_new(1:knon), &
               lon_scat, lat_scat, q2m, t2m &
#ifdef ORCH_NEW
               , nb_fields_cpl, fields_cpl(1:knon,:))
#else
               )
#endif
#endif

       ENDIF

       albedo_keep(1:knon) = (albedo_out(1:knon,1)+albedo_out(1:knon,2))/2.

    ENDIF
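! On the very first physics step (debut), ORCHIDEE has just been called once
! with time index itime+itau_phy-1: this extra call appears to serve as an
! initialization call (grid setup, restart state) before the regular call
! below at itime+itau_phy.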

!   swdown_vrai(1:knon) = swnet(1:knon)/(1. - albedo_keep(1:knon))
    swdown_vrai(1:knon) = swdown(1:knon)

    IF (knon /= 0) THEN
#ifndef CPP_MPI
       ! Interface for ORCHIDEE compiled in sequential mode (without the preprocessing flag CPP_MPI)
       CALL intersurf_main (itime+itau_phy, nbp_lon, nbp_lat, knon, ktindex, dtime, &
            lrestart_read, lrestart_write, lalo, &
            contfrac, neighbours, resolution, date0, &
            zlev, u1_lay, v1_lay, spechum, temp_air, epot_air, ccanopy, &
            cdrag, peta_orc, peqa_orc, petb_orc, peqb_orc, &
            precip_rain, precip_snow, lwdown, swnet, swdown_vrai, ps, &
            evap, fluxsens, fluxlat, coastalflow, riverflow, &
            tsol_rad, tsurf_new, qsurf, albedo_out, emis_new, z0_new, &
            lon_scat, lat_scat, q2m, t2m &
#ifdef ORCH_NEW
            , nb_fields_cpl, fields_cpl)
#else
            )
#endif
#else
       ! Interface for ORCHIDEE version 1.9 or later compiled in parallel mode (with the preprocessing flag CPP_MPI)
       CALL intersurf_main (itime+itau_phy, nbp_lon, nbp_lat, offset, knon, ktindex, &
            orch_comm, dtime, lrestart_read, lrestart_write, lalo, &
            contfrac, neighbours, resolution, date0, &
            zlev, u1_lay(1:knon), v1_lay(1:knon), spechum(1:knon), temp_air(1:knon), epot_air(1:knon), ccanopy(1:knon), &
            cdrag(1:knon), peta_orc(1:knon), peqa_orc(1:knon), petb_orc(1:knon), peqb_orc(1:knon), &
            precip_rain(1:knon), precip_snow(1:knon), lwdown(1:knon), swnet(1:knon), swdown_vrai(1:knon), ps(1:knon), &
            evap(1:knon), fluxsens(1:knon), fluxlat(1:knon), coastalflow(1:knon), riverflow(1:knon), &
            tsol_rad(1:knon), tsurf_new(1:knon), qsurf(1:knon), albedo_out(1:knon,:), emis_new(1:knon), z0_new(1:knon), &
            lon_scat, lat_scat, q2m, t2m &
#ifdef ORCH_NEW
            , nb_fields_cpl, fields_cpl(1:knon,:))
#else
            )
#endif
#endif
    ENDIF

    albedo_keep(1:knon) = (albedo_out(1:knon,1)+albedo_out(1:knon,2))/2.

!* Send fields to the coupler
!
    IF (type_ocean=='couple') THEN
       CALL cpl_send_land_fields(itime, knon, knindex, &
            riverflow, coastalflow)
    ENDIF

    alb1_new(1:knon) = albedo_out(1:knon,1)
    alb2_new(1:knon) = albedo_out(1:knon,2)

! ORCHIDEE convention: positive upward
    fluxsens(1:knon) = -1. * fluxsens(1:knon)
    fluxlat(1:knon)  = -1. * fluxlat(1:knon)

!   evap = -1. * evap

    IF (debut) lrestart_read = .FALSE.

! Decompress variables for the module carbon_cycle_mod
    IF (carbon_cycle_cpl) THEN
       fco2_land_inst(:)=0.
       fco2_lu_inst(:)=0.

       DO igrid = 1, knon
          ireal = knindex(igrid)
          fco2_land_inst(ireal) = fields_cpl(igrid,1)
          fco2_lu_inst(ireal)   = fields_cpl(igrid,2)
       END DO
    END IF

#endif
  END SUBROUTINE surf_land_orchidee
!
!****************************************************************************************
!
  SUBROUTINE init_orchidee_index(knon, orch_comm, knindex, offset, ktindex)

    USE mod_grid_phy_lmdz, ONLY: nbp_lon, nbp_lat

#ifdef CPP_MPI
    include 'mpif.h'
#endif


! Input arguments
!****************************************************************************************
    INTEGER, INTENT(IN)                   :: knon
    INTEGER, INTENT(IN)                   :: orch_comm
    INTEGER, DIMENSION(klon), INTENT(IN)  :: knindex

! Output arguments
!****************************************************************************************
    INTEGER, INTENT(OUT)                  :: offset
    INTEGER, DIMENSION(knon), INTENT(OUT) :: ktindex

! Local variables
!****************************************************************************************
#ifdef CPP_MPI
    INTEGER, DIMENSION(MPI_STATUS_SIZE) :: status
#endif

    INTEGER :: mylastpoint
    INTEGER :: lastpoint
    INTEGER :: mpi_rank_orch
    INTEGER :: mpi_size_orch
    INTEGER :: ierr
!
! End of definitions
!****************************************************************************************

    mylastpoint = klon_mpi_begin - 1 + knindex(knon) + nbp_lon - 1

    IF (is_parallel) THEN
#ifdef CPP_MPI
       CALL mpi_comm_size(orch_comm,mpi_size_orch,ierr)
       CALL mpi_comm_rank(orch_comm,mpi_rank_orch,ierr)
#endif
    ELSE
       mpi_rank_orch = 0
       mpi_size_orch = 1
    ENDIF

    IF (is_parallel) THEN
       IF (mpi_rank_orch /= 0) THEN
#ifdef CPP_MPI
          CALL mpi_recv(lastpoint,1,mpi_integer,mpi_rank_orch-1,1234,orch_comm,status,ierr)
#endif
       ENDIF

       IF (mpi_rank_orch /= mpi_size_orch-1) THEN
#ifdef CPP_MPI
          CALL mpi_send(mylastpoint,1,mpi_integer,mpi_rank_orch+1,1234,orch_comm,ierr)
#endif
       ENDIF
    ENDIF

    IF (mpi_rank_orch == 0) THEN
       offset = 0
    ELSE
       offset = lastpoint - mod(lastpoint,nbp_lon)
    ENDIF
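! Reading of the exchange above: each process receives from its predecessor
! the global 1D index of the predecessor's last land point, then rounds it
! down to the beginning of that longitude row (a multiple of nbp_lon); the
! ORCHIDEE domain decomposition apparently expects each process's offset to
! start on a row boundary.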

    ktindex(1:knon) = knindex(1:knon) + (klon_mpi_begin + nbp_lon - 1) - offset - 1


  END SUBROUTINE init_orchidee_index
!
!****************************************************************************************
!
  SUBROUTINE get_orchidee_communicator(knon, orch_comm)

#ifdef CPP_MPI
    include 'mpif.h'
#endif


    INTEGER, INTENT(IN)  :: knon
    INTEGER, INTENT(OUT) :: orch_comm

    INTEGER :: color
    INTEGER :: ierr
!
! End of definitions
!****************************************************************************************

    IF (knon==0) THEN
       color = 0
    ELSE
       color = 1
    ENDIF

#ifdef CPP_MPI
    CALL mpi_comm_split(comm_lmdz_phy,color,mpi_rank,orch_comm,ierr)
#endif

  END SUBROUTINE get_orchidee_communicator
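! Design note: mpi_comm_split groups processes by colour, so all processes
! owning at least one land point (colour 1) end up together in the
! communicator handed to ORCHIDEE; passing mpi_rank as the key preserves the
! original rank order inside the new communicator.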
!
!****************************************************************************************
!
  SUBROUTINE init_neighbours(knon, neighbours, ktindex, pctsrf)

    USE indice_sol_mod
    USE mod_grid_phy_lmdz, ONLY: nbp_lon, nbp_lat

#ifdef CPP_MPI
    include 'mpif.h'
#endif

! Input arguments
!****************************************************************************************
    INTEGER, INTENT(IN)                      :: knon
    INTEGER, DIMENSION(klon), INTENT(IN)     :: ktindex
    REAL, DIMENSION(klon), INTENT(IN)        :: pctsrf

! Output arguments
!****************************************************************************************
    INTEGER, DIMENSION(knon,8), INTENT(OUT)  :: neighbours

! Local variables
!****************************************************************************************
    INTEGER                                  :: knon_g
    INTEGER                                  :: i, igrid, jj, ij, iglob
    INTEGER                                  :: ierr, ireal, index
    INTEGER                                  :: var_tmp
    INTEGER, DIMENSION(0:mpi_size-1)         :: knon_nb
    INTEGER, DIMENSION(0:mpi_size-1)         :: displs
    INTEGER, DIMENSION(8,3)                  :: off_ini
    INTEGER, DIMENSION(8)                    :: offset
    INTEGER, DIMENSION(knon)                 :: ktindex_p
    INTEGER, DIMENSION(nbp_lon,nbp_lat)      :: correspond
    INTEGER, ALLOCATABLE, DIMENSION(:)       :: ktindex_g
    INTEGER, ALLOCATABLE, DIMENSION(:,:)     :: neighbours_g
    REAL, DIMENSION(klon_glo)                :: pctsrf_g

!
! End of definitions
!****************************************************************************************

    IF (is_sequential) THEN
       knon_nb(:) = knon
    ELSE

#ifdef CPP_MPI
       CALL mpi_gather(knon,1,mpi_integer,knon_nb,1,mpi_integer,0,comm_lmdz_phy,ierr)
#endif

    ENDIF

    IF (is_mpi_root) THEN
       knon_g = sum(knon_nb(:))
       ALLOCATE(ktindex_g(knon_g))
       ALLOCATE(neighbours_g(knon_g,8))
       neighbours_g(:,:) = -1
       displs(0) = 0
       DO i = 1, mpi_size-1
          displs(i) = displs(i-1) + knon_nb(i-1)
       ENDDO
    ELSE
       ALLOCATE(ktindex_g(1))
       ALLOCATE(neighbours_g(1,8))
    ENDIF

    ktindex_p(1:knon) = ktindex(1:knon) + klon_mpi_begin - 1 + nbp_lon - 1

    IF (is_sequential) THEN
       ktindex_g(:) = ktindex_p(:)
    ELSE

#ifdef CPP_MPI
       CALL mpi_gatherv(ktindex_p,knon,mpi_integer,ktindex_g,knon_nb,&
            displs,mpi_integer,0,comm_lmdz_phy,ierr)
#endif

    ENDIF

    CALL gather(pctsrf,pctsrf_g)

    IF (is_mpi_root) THEN
! Initialization of the offsets
!
! west-edge offsets
       off_ini(1,1) = - nbp_lon  ; off_ini(2,1) = - nbp_lon + 1 ; off_ini(3,1) = 1
       off_ini(4,1) = nbp_lon + 1; off_ini(5,1) = nbp_lon       ; off_ini(6,1) = 2 * nbp_lon - 1
       off_ini(7,1) = nbp_lon - 1; off_ini(8,1) = - 1
! normal-point offsets
       off_ini(1,2) = - nbp_lon  ; off_ini(2,2) = - nbp_lon + 1 ; off_ini(3,2) = 1
       off_ini(4,2) = nbp_lon + 1; off_ini(5,2) = nbp_lon       ; off_ini(6,2) = nbp_lon - 1
       off_ini(7,2) = - 1        ; off_ini(8,2) = - nbp_lon - 1
! east-edge offsets
       off_ini(1,3) = - nbp_lon  ; off_ini(2,3) = - 2 * nbp_lon + 1 ; off_ini(3,3) = - nbp_lon + 1
       off_ini(4,3) = 1          ; off_ini(5,3) = nbp_lon       ; off_ini(6,3) = nbp_lon - 1
       off_ini(7,3) = - 1        ; off_ini(8,3) = - nbp_lon - 1
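!
! The three sets above hold, for each point, the 1D-index shifts towards its
! 8 neighbours, ordered clockwise starting from north (N, NE, E, SE, S, SW,
! W, NW, which seems to be the convention ORCHIDEE expects); the west- and
! east-edge variants wrap the longitude index around the periodic boundary
! of the row.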
!
!
! Beware of the poles
!
       DO igrid = 1, knon_g
          index = ktindex_g(igrid)
          jj = int((index - 1)/nbp_lon) + 1
          ij = index - (jj - 1) * nbp_lon
          correspond(ij,jj) = igrid
       ENDDO

       DO igrid = 1, knon_g
          iglob = ktindex_g(igrid)
          IF (mod(iglob, nbp_lon) == 1) THEN
             offset = off_ini(:,1)
          ELSE IF (mod(iglob, nbp_lon) == 0) THEN
             offset = off_ini(:,3)
          ELSE
             offset = off_ini(:,2)
          ENDIF
          DO i = 1, 8
             index = iglob + offset(i)
             ireal = (min(max(1, index - nbp_lon + 1), klon_glo))
             IF (pctsrf_g(ireal) > epsfra) THEN
                jj = int((index - 1)/nbp_lon) + 1
                ij = index - (jj - 1) * nbp_lon
                neighbours_g(igrid, i) = correspond(ij, jj)
             ENDIF
          ENDDO
       ENDDO

    ENDIF

    DO i = 1, 8
       IF (is_sequential) THEN
          neighbours(:,i) = neighbours_g(:,i)
       ELSE
#ifdef CPP_MPI
          IF (knon > 0) THEN
             ! knon > 0: scatter the global field neighbours_g from the master process to the local processes
             CALL mpi_scatterv(neighbours_g(:,i),knon_nb,displs,mpi_integer,neighbours(:,i),knon,mpi_integer,0,comm_lmdz_phy,ierr)
          ELSE
             ! knon = 0: no need to keep the field on this process
             CALL mpi_scatterv(neighbours_g(:,i),knon_nb,displs,mpi_integer,var_tmp,knon,mpi_integer,0,comm_lmdz_phy,ierr)
          END IF
#endif
       ENDIF
    ENDDO

  END SUBROUTINE init_neighbours
!
!****************************************************************************************
!

#endif

END MODULE surf_land_orchidee_noopenmp_mod