Commits on Source (7)
......@@ -2,12 +2,15 @@
# Force Travis to rebuild by committing an increment to this counter: 5
language: c
compiler:
- gcc
- clang
- g++
before_install:
- if [[ "${CC}" == "gcc" || "${CC}" == "g++" ]]; then export CXX=g++; fi
- if [[ "${CC}" == "clang" || "${CC}" == "clang++" ]]; then export CXX=clang++; fi
- sudo apt-get update -qq
- sudo apt-get install -y -qq antlr libantlr-dev libnetcdfc7 libnetcdf-dev netcdf-bin udunits-bin libudunits2-0 libudunits2-dev
- ./configure
......
......@@ -52,10 +52,12 @@ endif()
#general C/C++ build flags
#/////////////////////////////////////////////////////////////////////////////////////
set(CMAKE_BUILD_TYPE Debug)
set(BUILD_STATIC_LIBS ON)
set(BUILD_SHARED_LIBS OFF)
set(CMAKE_BUILD_TYPE Debug CACHE STRING "Choose the type of build.")
set(BUILD_SHARED_LIBS OFF CACHE BOOL "Build static libs.")
if (NOT BUILD_SHARED_LIBS)
add_definitions(-DENABLE_STATIC)
endif()
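#A hedged illustration (not part of this CMakeLists.txt): because the settings above are CACHE
#entries, they may be overridden at configure time without editing this file, e.g.
#  cmake .. -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=ON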
#//////////////////////////
#detect minimum dependencies using
......@@ -104,6 +106,7 @@ endif()
#netCDF
#Check for several functions in the netCDF library; define C macro accordingly
#CMAKE_REQUIRED_LIBRARIES needs NETCDF_LIBRARY
#check for header file netcdf_mem.h existence
#/////////////////////////////////////////////////////////////////////////////////////
set(CMAKE_REQUIRED_LIBRARIES ${NETCDF_LIBRARY})
......@@ -119,19 +122,27 @@ if (has_nc_inq_format)
add_definitions(-DHAVE_NC_INQ_FORMAT)
endif()
find_path(netcdf_mem_h netcdf_mem.h)
if (netcdf_mem_h)
message("-- Found netcdf_mem.h in: " ${netcdf_mem_h})
add_definitions(-DHAVE_NETCDF_MEM_H)
else()
message("-- NetCDF diskless functionaliy not available, netcdf_mem.h not found")
endif()
#/////////////////////////////////////////////////////////////////////////////////////
#HDF5
#/////////////////////////////////////////////////////////////////////////////////////
find_library(HDF5_LIBRARY NAMES hdf5 ${find_opt})
find_library(HDF5_LIBRARY NAMES hdf5 PATHS "/usr/lib/x86_64-linux-gnu/hdf5/serial")
if(NOT HDF5_LIBRARY)
message(FATAL_ERROR "${color_red}hdf5 library not found${color_reset}")
else()
message("-- Found hdf5 library at: " ${HDF5_LIBRARY})
endif()
find_library(HDF5_HL_LIBRARY NAMES hdf5_hl ${find_opt})
find_library(HDF5_HL_LIBRARY NAMES hdf5_hl PATHS "/usr/lib/x86_64-linux-gnu/hdf5/serial")
if(NOT HDF5_HL_LIBRARY)
message(FATAL_ERROR "${color_red}hdf5 high level library not found${color_reset}")
else()
......@@ -238,22 +249,27 @@ endif()
#compile as C++/static CRT on Visual Studio and as C99 on UNIX
#//////////////////////////
set(NCO_MSVC_USE_MT yes CACHE BOOL "Use MT flags when compiling in MSVC")
if (MSVC)
add_definitions( "/D_CRT_SECURE_NO_WARNINGS /D_CRT_NONSTDC_NO_DEPRECATE" )
message("-- Building with static runtime library")
set(CMAKE_CXX_FLAGS_DEBUG "/D_DEBUG /MTd /Zi /Ob0 /Od /RTC1")
set(CMAKE_CXX_FLAGS_MINSIZEREL "/MT /O1 /Ob1 /D NDEBUG")
set(CMAKE_CXX_FLAGS_RELEASE "/MT /O2 /Ob2 /D NDEBUG")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "/MT /Zi /O2 /Ob1 /D NDEBUG")
if (NCO_MSVC_USE_MT)
foreach(CompilerFlag CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO
CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
string(REPLACE "/MD" "/MT" ${CompilerFlag} "${${CompilerFlag}}")
endforeach()
endif()
#//////////////////////////
#compile as C++ Code (/TP)
#//////////////////////////
set(CMAKE_C_FLAGS_DEBUG "/TP /D_DEBUG /MTd /Zi /Ob0 /Od /RTC1")
set(CMAKE_C_FLAGS_MINSIZEREL "/TP /MT /O1 /Ob1 /D NDEBUG")
set(CMAKE_C_FLAGS_RELEASE "/TP /MT /O2 /Ob2 /D NDEBUG")
set(CMAKE_C_FLAGS_RELWITHDEBINFO "/TP /MT /Zi /O2 /Ob1 /D NDEBUG")
foreach(CompilerFlag CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE
CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO)
set(${CompilerFlag} "/TP ${${CompilerFlag}}")
endforeach()
#//////////////////////////
#visual studio defines math symbols in math.h, avoid duplicate definition
......@@ -393,7 +409,8 @@ set(nco_SOURCES ${nco_SOURCES} src/nco/nco_var_rth.c src/nco/nco_var_rth.h)
set(nco_SOURCES ${nco_SOURCES} src/nco/nco_var_scv.c src/nco/nco_var_scv.h)
set(nco_SOURCES ${nco_SOURCES} src/nco/nco_var_utl.c src/nco/nco_var_utl.h)
add_library(nco STATIC ${nco_SOURCES})
# Type of library (SHARED, STATIC) is configured using BUILD_SHARED_LIBS
add_library(nco ${nco_SOURCES})
add_executable(ncks src/nco/ncks.c)
add_executable(ncbo src/nco/ncbo.c)
add_executable(ncecat src/nco/ncecat.c)
......
environment:
matrix:
- TARGET_ARCH: x64
CONDA_PY: 36
CONDA_INSTALL_LOCN: C:\\Miniconda36-x64
platform:
- x64
install:
# If there is a newer build queued for the same PR, cancel this one.
- ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
throw "There are newer queued builds for this pull request, failing early." }
- cmd: rmdir C:\cygwin /s /q
# Add path, activate `conda` and update conda.
- cmd: call %CONDA_INSTALL_LOCN%\Scripts\activate.bat
# # Add path, activate `conda` and update conda.
- cmd: conda config --set always_yes yes --set changeps1 no --set show_channel_urls true
- cmd: conda update conda
- cmd: conda config --prepend channels conda-forge
- cmd: set PYTHONUNBUFFERED=1
- cmd: conda install conda-build
- cmd: conda info --all
- cmd: conda list
# Skip .NET project specific build phase.
build: off
test_script:
- "conda build conda.recipe"
......@@ -655,6 +655,8 @@ CPP_TKN_OS := -DHAVE_REGEX_H -DNCO_HAVE_REGEX_FUNCTIONALITY -DHAVE_GETPAGESIZE -
CPP_TKN_OS += -DHAVE_NC_INQ_FORMAT
# Assume netCDF >= 4.3.2
CPP_TKN_OS += -DHAVE_NC_INQ_PATH
# Assume netCDF >= 4.4.0
CPP_TKN_OS += -DHAVE_NETCDF_MEM_H
# gcc 4.7.3 finally includes strcasestr() in string.h iff _GNU_SOURCE token is defined
# NB: C++ (or at least g++ 4.7.3) always includes strcasestr()---work around this with tokens in nco_sng_utl.[ch]
ifneq (${PVM_ARCH},CRAY)
......
......@@ -2,17 +2,17 @@
# http://cvs.fedoraproject.org/viewvc/devel/nco/nco.spec?view=co
Name: nco
Version: 4.7.0
Version: 4.7.1
Release: 1%{?dist}
Summary: Programs that manipulate netCDF files
Group: Applications/Engineering
License: GPL3
URL: http://nco.sf.net/
# Obtain NCO version 4.7.0-1 tar.gz from Sourceforge using CVS:
# Obtain NCO version 4.7.1-1 tar.gz from Sourceforge using CVS:
# cvs -d:pserver:anonymous@nco.cvs.sf.net:/cvsroot/nco login
# cvs -z3 -d:pserver:anonymous@nco.cvs.sf.net:/cvsroot/nco co -r nco-4.7.0-1 -d nco-%{version} nco
# tar czf nco-%{version}.tar.gz --exclude='nco-4.7.0/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude='ncap_yacc.[ch]' ./nco-%{version}
# cvs -z3 -d:pserver:anonymous@nco.cvs.sf.net:/cvsroot/nco co -r nco-4.7.1-1 -d nco-%{version} nco
# tar czf nco-%{version}.tar.gz --exclude='nco-4.7.1/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude='ncap_yacc.[ch]' ./nco-%{version}
Source0: nco-%{version}.tar.gz
#Patch0: nco_install_C_headers.patch
#Patch1: nco_find_udunits-dat.patch
......@@ -108,6 +108,9 @@ fi
# %{_libdir}/libnco++.so
%changelog
* Thu Dec 21 2017 Charlie Zender <zender@uci.edu> - 4.7.1-1
- new upstream 4.7.1
* Wed Nov 08 2017 Charlie Zender <zender@uci.edu> - 4.7.0-1
- new upstream 4.7.0
......
......@@ -5,20 +5,20 @@
# Usage:
# Export tagged, public versions
# /usr/bin/scp ${DATA}/nco-4.7.0.tar.gz zender,nco@web.sf.net:/home/project-web/nco/htdocs/src
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln nco-4.7.0 # Build, do not release on SF
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --sf nco-4.7.0 # Build, release on SF
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --nst_all nco-4.7.0 # Install, do not build
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --nst_all nco-4.7.0 # Build and install
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_cnt nco-4.7.0
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_prs nco-4.7.0
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cgd_cnt nco-4.7.0
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cray_prs nco-4.7.0
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --bbl_cnt nco-4.7.0
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --blk_cnt nco-4.7.0
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --dat_cnt nco-4.7.0
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --ute_prs nco-4.7.0
# /usr/bin/scp ${DATA}/nco-4.7.1.tar.gz zender,nco@web.sf.net:/home/project-web/nco/htdocs/src
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln nco-4.7.1 # Build, do not release on SF
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --sf nco-4.7.1 # Build, release on SF
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --nst_all nco-4.7.1 # Install, do not build
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --bld --cln --nst_all nco-4.7.1 # Build and install
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_cnt nco-4.7.1
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --acd_prs nco-4.7.1
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cgd_cnt nco-4.7.1
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --cray_prs nco-4.7.1
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --bbl_cnt nco-4.7.1
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --blk_cnt nco-4.7.1
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --dat_cnt nco-4.7.1
# ${HOME}/nco/bld/nco_dst.pl --dbg=2 --cln --ute_prs nco-4.7.1
# Export daily snapshot
# ${HOME}/nco/bld/nco_dst.pl --dbg=2
......@@ -256,7 +256,7 @@ if($bld){
# Set up FTP server
chdir $dst_pth_pfx or die "$prg_nm: ERROR unable to chdir to $dst_pth_pfx: $!\n"; # $! is system error string
cmd_prc("$cp_cmd $doc_fl ./$dst_vrs/doc"); # Copy derived documentation to source directory
cmd_prc("$tar_cmd cvzf $dst_fl --exclude='nco-4.7.0/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude=ncap_yacc.[ch] ./$dst_vrs"); # Create gzipped tarfile
cmd_prc("$tar_cmd cvzf $dst_fl --exclude='nco-4.7.1/debian*' --exclude='.cvsignore' --exclude=ncap_lex.c --exclude=ncap_yacc.[ch] ./$dst_vrs"); # Create gzipped tarfile
cmd_prc("$rsh_cmd $www_mch $rm_cmd $www_drc/src/$dst_fl"); # Remove any distribution with same name
if($dly_snp){cmd_prc("$rsh_cmd $www_mch $rm_cmd -r $www_drc/src/nco-????????.tar.gz");} # Remove previous daily snapshots from WWW server
cmd_prc("$rcp_cmd $dst_fl $www_mch:$www_drc/src"); # Copy local tarfile to WWW server
......
mkdir %SRC_DIR%\build
cd %SRC_DIR%\build
set "CFLAGS=%CFLAGS% -DWIN32 -DGSL_DLL"
set "CXXFLAGS=%CXXFLAGS% -DWIN32 -DGSL_DLL"
cmake -G "NMake Makefiles" ^
-D CMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% ^
-D CMAKE_BUILD_TYPE=Release ^
-D NCO_MSVC_USE_MT=no ^
-D CMAKE_PREFIX_PATH=%LIBRARY_PREFIX% ^
-D NETCDF_INCLUDE=%LIBRARY_INC% ^
-D NETCDF_LIBRARY=%LIBRARY_LIB%\netcdf.lib ^
-D HDF5_LIBRARY=%LIBRARY_LIB%\libhdf5.lib ^
-D HDF5_HL_LIBRARY=%LIBRARY_LIB%\libhdf5_hl.lib ^
-D GSL_INCLUDE=%LIBRARY_INC% ^
-D GSL_LIBRARY=%LIBRARY_LIB%\gsl.lib ^
-D GSL_CBLAS_LIBRARY=%LIBRARY_LIB%\gslcblas.lib ^
-D UDUNITS2_INCLUDE=%LIBRARY_LIB% ^
-D UDUNITS2_LIBRARY=%LIBRARY_LIB%\udunits2.lib ^
-D EXPAT_LIBRARY=%LIBRARY_LIB%\expat.lib ^
-D CURL_LIBRARY=%LIBRARY_LIB%\libcurl.lib ^
-D ANTLR_INCLUDE:PATH=%LIBRARY_INC%\antlr ^
%SRC_DIR%
if errorlevel 1 exit 1
nmake
if errorlevel 1 exit 1
nmake install
if errorlevel 1 exit 1
move %LIBRARY_PREFIX%\*.exe %LIBRARY_BIN% || exit 1
{% set version = "dev" %}
package:
name: nco
version: {{ version }}
source:
path: ../
build:
number: 0
features:
- vc14
requirements:
build:
- python
- cmake
- antlr >=2.7.7,<3
- curl >=7.44.0,<8
- expat 2.2.*
- gsl >=2.2,<2.3
- hdf5 1.10.1
- libnetcdf 4.4.*
- udunits2
- zlib 1.2.11
- vc 14
run:
- curl >=7.44.0,<8
- expat 2.2.*
- gsl >=2.2,<2.3
- hdf5 1.10.1
- libnetcdf 4.4.*
- udunits2
- vc 14
test:
source_files:
- data/in.cdl
commands:
- ncks --help
- ncap2 --help
- ncks -M "http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his/ESPRESSO_Real-Time_v2_History_Best"
cd data
ncgen -o in.nc in.cdl || exit 1
ncks -H --trd -v one in.nc || exit 1
ncap2 -O -v -s "erf_one=float(gsl_sf_erf(1.0f));" in.nc foo.nc || exit 1
ncks -v erf_one foo.nc || exit 1
......@@ -253,6 +253,9 @@
/* Define to 1 if you have the <netcdf.h> header file. */
#undef HAVE_NETCDF_H
/* Define to 1 if <netcdf_mem.h> is present */
#undef HAVE_NETCDF_MEM_H
/* Define to 1 if you have the <netinet/in.h> header file. */
#undef HAVE_NETINET_IN_H
......
#! /bin/sh
# Guess values for system-dependent variables and create Makefiles.
# Generated by GNU Autoconf 2.69 for NCO netCDF Operators 4.7.0.
# Generated by GNU Autoconf 2.69 for NCO netCDF Operators 4.7.1.
#
# Report bugs to <nco-bugs@lists.sourceforge.net>.
#
......@@ -592,8 +592,8 @@ MAKEFLAGS=
# Identity of this package.
PACKAGE_NAME='NCO netCDF Operators'
PACKAGE_TARNAME='nco'
PACKAGE_VERSION='4.7.0'
PACKAGE_STRING='NCO netCDF Operators 4.7.0'
PACKAGE_VERSION='4.7.1'
PACKAGE_STRING='NCO netCDF Operators 4.7.1'
PACKAGE_BUGREPORT='nco-bugs@lists.sourceforge.net'
PACKAGE_URL=''
 
......@@ -1394,7 +1394,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
\`configure' configures NCO netCDF Operators 4.7.0 to adapt to many kinds of systems.
\`configure' configures NCO netCDF Operators 4.7.1 to adapt to many kinds of systems.
 
Usage: $0 [OPTION]... [VAR=VALUE]...
 
......@@ -1465,7 +1465,7 @@ fi
 
if test -n "$ac_init_help"; then
case $ac_init_help in
short | recursive ) echo "Configuration of NCO netCDF Operators 4.7.0:";;
short | recursive ) echo "Configuration of NCO netCDF Operators 4.7.1:";;
esac
cat <<\_ACEOF
 
......@@ -1633,7 +1633,7 @@ fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
NCO netCDF Operators configure 4.7.0
NCO netCDF Operators configure 4.7.1
generated by GNU Autoconf 2.69
 
Copyright (C) 2012 Free Software Foundation, Inc.
......@@ -2499,7 +2499,7 @@ cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
 
It was created by NCO netCDF Operators $as_me 4.7.0, which was
It was created by NCO netCDF Operators $as_me 4.7.1, which was
generated by GNU Autoconf 2.69. Invocation command line was
 
$ $0 $@
......@@ -3755,7 +3755,7 @@ fi
 
# Define the identity of the package.
PACKAGE='nco'
VERSION='4.7.0'
VERSION='4.7.1'
 
 
cat >>confdefs.h <<_ACEOF
......@@ -17824,6 +17824,28 @@ fi
 
fi
 
# 20171218: nc_open_mem() is defined in netCDF >= 4.4.0, however ...
# Ubuntu (Xenial at least) used broken netCDF CMake (not autoconf) to package 4.4.0 (it does not install netcdf_mem.h):
# https://github.com/nco/nco/issues/44
# Symptom of "missing netcdf_mem.h" and/or "unresolved nc_open_mem()" occurs with NCO 4.6.2+
# Until 20171112 we used (Option 1):
# #if NC_LIB_VERSION >= 440
# which forces Ubuntu netCDF 4.4.0 users to build netCDF with autoconf and install, e.g., into /usr/local, or
# to manually copy netcdf_mem.h into /usr/include (has anyone tested whether that really solves the problem?)
# Option 2 is to add test/symbol in build-engine, e.g.,
# #if defined(HAVE_NETCDF_MEM_H)
# which requires additional build tests in Autoconf/CMake/Makefile */
ac_fn_c_check_header_mongrel "$LINENO" "netcdf_mem.h" "ac_cv_header_netcdf_mem_h" "$ac_includes_default"
if test "x$ac_cv_header_netcdf_mem_h" = xyes; then :
$as_echo "#define HAVE_NETCDF_MEM_H 1" >>confdefs.h
else
nco_have_netcdf_mem_h=no
fi
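# Hedged illustration (not generated by autoconf): a quick manual check of whether the installed
# netCDF provides netcdf_mem.h, useful on the Ubuntu systems described above, is, e.g.,
#   test -f "$(nc-config --includedir)/netcdf_mem.h" && echo 'netcdf_mem.h present' || echo 'netcdf_mem.h missing'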
# Begin OpenMP
# Check whether --enable-openmp was given.
if test "${enable_openmp+set}" = set; then :
......@@ -21526,7 +21548,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
This file was extended by NCO netCDF Operators $as_me 4.7.0, which was
This file was extended by NCO netCDF Operators $as_me 4.7.1, which was
generated by GNU Autoconf 2.69. Invocation command line was
 
CONFIG_FILES = $CONFIG_FILES
......@@ -21592,7 +21614,7 @@ _ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
NCO netCDF Operators config.status 4.7.0
NCO netCDF Operators config.status 4.7.1
configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"
 
......
......@@ -54,7 +54,7 @@
# 20170808: Choose between traditional version and Git-based version
# If Git, consider that Linux dynamic libraries will have full messy name, MacOSX will not
# https://stackoverflow.com/questions/43526939/how-to-insert-git-based-version-in-autoconf-managed-project
AC_INIT([NCO netCDF Operators],[4.7.0],[nco-bugs@lists.sourceforge.net],[nco])
AC_INIT([NCO netCDF Operators],[4.7.1],[nco-bugs@lists.sourceforge.net],[nco])
#AC_INIT([NCO netCDF Operators],m4_esyscmd([git describe --abbrev=7 --dirty --always --tags | tr -d '\n']),[nco-bugs@lists.sourceforge.net],[nco])
# Print GNU copyright in configure script
......@@ -312,6 +312,19 @@ if (test "x${enable_netcdf4}" = 'xyes' && test "x${header_defines_nc_chunked}" =
])
fi
# 20171218: nc_open_mem() is defined in netCDF >= 4.4.0, however ...
# Ubuntu (Xenial at least) used broken netCDF CMake (not autoconf) to package 4.4.0 (it does not install netcdf_mem.h):
# https://github.com/nco/nco/issues/44
# Symptom of "missing netcdf_mem.h" and/or "unresolved nc_open_mem()" occurs with NCO 4.6.2+
# Until 20171112 we used (Option 1):
# #if NC_LIB_VERSION >= 440
# which forces Ubuntu netCDF 4.4.0 users to build netCDF with autoconf and install, e.g., into /usr/local, or
# to manually copy netcdf_mem.h into /usr/include (has anyone tested whether that really solves the problem?)
# Option 2 is to add test/symbol in build-engine, e.g.,
# #if defined(HAVE_NETCDF_MEM_H)
# which requires additional build tests in Autoconf/CMake/Makefile */
AC_CHECK_HEADER([netcdf_mem.h],AC_DEFINE([HAVE_NETCDF_MEM_H],1,[Define to 1 if <netcdf_mem.h> is present]),nco_have_netcdf_mem_h=no)
# Begin OpenMP
AC_ARG_ENABLE(openmp,AS_HELP_STRING([--enable-openmp],[Build NCO with OpenMP [[default=yes]]]),enable_openmp=${enableval},enable_openmp=yes)
# Enable OpenMP unless told not to
......
......@@ -268,14 +268,41 @@ make check >> nco.make.${GNU_TRP}.foo 2>&1
make install >> nco.make.${GNU_TRP}.foo 2>&1
scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU_TRP}.foo nco.make.${GNU_TRP}.foo dust.ess.uci.edu:Sites/nco/rgr
# gcc/g++ Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories with CMake:
# Generic when netCDF is in /usr (Ubuntu glace, Fedora skyglow)
export GNU_TRP=`~/nco/autobld/config.guess`
cd ~/nco;/bin/rm -f *.${GNU_TRP}.foo;make distclean
cd ~/nco/cmake
cmake .. -DCMAKE_INSTALL_PREFIX=${HOME}
make install
# gcc/g++ Zender uses this to develop/install/update netCDF4-enabled NCO in system directories with CMake:
# Generic when netCDF is in /usr (Ubuntu glace, Fedora skyglow)
cd ~/nco/cmake
cmake .. -DCMAKE_INSTALL_PREFIX=/usr/local
make
sudo make install
# gcc/g++ Zender uses this to develop/install/update netCDF4-enabled NCO in system directories with CMake, when build requirements are in unusual directories:
# netCDF4/HDF5 in /usr/local
cd ~/nco/cmake
cmake .. -DCMAKE_INSTALL_PREFIX=${HOME} -DNETCDF_INCLUDE:PATH=${NETCDF_INC} -DNETCDF_LIBRARY:FILE=${NETCDF_LIB}/libnetcdf.a -DHDF5_LIBRARY:FILE=${NETCDF_LIB}/libhdf5.a -DHDF5_HL_LIBRARY:FILE=${NETCDF_LIB}/libhdf5.a # Skyglow (Fedora does not supply szip packages)
make install
# netCDF4/HDF5 in /usr/local
cd ~/nco/cmake
cmake .. -DCMAKE_INSTALL_PREFIX=${HOME} -DNETCDF_INCLUDE:PATH=${NETCDF_INC} -DNETCDF_LIBRARY:FILE=${NETCDF_LIB}/libnetcdf.a -DHDF5_LIBRARY:FILE=${NETCDF_LIB}/libhdf5.a -DHDF5_HL_LIBRARY:FILE=${NETCDF_LIB}/libhdf5.a # Glace
make install
# 20150901: gcc/g++ Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories on cooley:
# Sysadmin upgrade request: 20170825
# Latest ticket: alcf-support #345910
# Latest sysadmin-compiled module usage: soft add +nco-4.6.8, soft add +netcdf-4.4.1.1
# Personal recipe last modified: 20170825
# Personal recipe last used successfully: 20170825
# Personal recipe last used successfully: 20171108
export GNU_TRP=`~/nco/autobld/config.guess`
cd ~/nco;/bin/rm -f *.${GNU_TRP}.foo;make distclean
cd ~/nco;git reset --hard origin/master
ANTLR_ROOT=${HOME} CC='gcc' CXX='g++' NETCDF_ROOT='/soft/libraries/unsupported/netcdf-4.4.1.1' UDUNITS2_PATH=${HOME} ./configure --prefix=${HOME} --bindir=${MY_BIN_DIR} --datadir=${HOME}/nco/data --libdir=${MY_LIB_DIR} --mandir=${HOME}/nco/man > nco.configure.${GNU_TRP}.foo 2>&1
/bin/cp -f config.log nco.config.log.${GNU_TRP}.foo
/bin/cp -f libtool nco.libtool.${GNU_TRP}.foo
......@@ -320,7 +347,7 @@ scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU
# Sysadmin upgrade request: https://nersc.service-now.com/navpage.do
# Latest sysadmin-compiled module usage: module load nco/4.6.7 # 20170612 (without ncap2)
# Personal recipe last modified: 20170411
# Personal recipe last used successfully: 20170919
# Personal recipe last used successfully: 20171108
module load szip
module load gsl
module load udunits
......@@ -341,7 +368,7 @@ scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU
# Latest ticket: INC0106410 on 20170804
# Latest sysadmin-compiled module usage: module load nco/4.6.7 # 20170809 (with ncap2)
# Personal recipe last modified: 20170807
# Personal recipe last used successfully: 20170927
# Personal recipe last used successfully: 20171108
module load szip
module load gsl
module load udunits2
......@@ -360,7 +387,7 @@ scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU
# Sysadmin upgrade request:
# Latest sysadmin-compiled module usage:
# Personal recipe last modified: 2016??
# Personal recipe last used successfully: 20170918
# Personal recipe last used successfully: 20171108
export GNU_TRP=`~/nco/autobld/config.guess`
cd ~/nco;/bin/rm -f *.${GNU_TRP}.foo;make distclean
ANTLR_ROOT=${HOME} CC='gcc' CXX='g++' NETCDF_ROOT='/usr/local/uvcdat/2016-01-21/Externals' UDUNITS2_PATH='/usr/local/uvcdat/2016-01-21/Externals' ./configure --prefix=${HOME} --bindir=${MY_BIN_DIR} --datadir=${HOME}/nco/data --libdir=${MY_LIB_DIR} --mandir=${HOME}/nco/man > nco.configure.${GNU_TRP}.foo 2>&1
......@@ -378,7 +405,7 @@ scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU
# Latest ticket: AREQ0159277 on 20170526
# Latest sysadmin-compiled module usage: soft add +nco-4.6.7 # 20170526
# Personal recipe last modified: 20161223
# Personal recipe last used successfully: 20170526
# Personal recipe last used successfully: 20171108
# fails to link to udunits...why?
# nm -a /soft/udunits/2.1.21/lib/libudunits2.a | grep ut_read_xml
# First use: 20161223 Latest use: 20161223
......@@ -412,11 +439,11 @@ scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU
# Latest ticket: 172597 on 20170526
# Latest sysadmin-compiled module usage: module load nco/4.6.9 # 20171002
# Personal recipe last modified: 20170812
# Personal recipe last used successfully: 20170919
# Personal recipe last used successfully: 20171108
module add intel/16.0.3;module add gsl;module add netcdf/4.4.1;
export PATH=${PATH}:/glade/apps/opt/netcdf/4.4.1/intel/16.0.3/bin # needed for ncgen
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/glade/apps/opt/netcdf/4.4.1/intel/16.0.3/lib # needed to run NCO
cd ~/nco;git reset --hard origin/master
cd ~/nco;git pull
cd ~/nco/bld;make NETCDF_ROOT='/glade/apps/opt/netcdf/4.4.1/intel/16.0.3' UDUNITS_INC='/glade/apps/opt/udunits/2.1.24/intel/12.1.4/include' UDUNITS_LIB='/glade/apps/opt/udunits/2.1.24/intel/12.1.4/lib' OPTS=D allinone;cd -
# icc/icpc 20171013 Zender uses this to develop/install/update netCDF4-enabled NCO in personal directories on cheyenne:
......@@ -436,7 +463,7 @@ cd ~/nco/bld;make NETCDF_ROOT='/glade/u/apps/ch/opt/netcdf/4.4.1.1/intel/17.0.1'
# Latest ticket: 358890 on 20170526
# Latest sysadmin-compiled module usage: module load nco/4.6.6 # 20170515
# Personal recipe last modified: 20170814
# Personal recipe last used successfully: 20170927
# Personal recipe last used successfully: 20171108
export LINUX_CC='gcc -std=c99 -pedantic -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_POSIX_SOURCE'
export LINUX_CXX='g++ -std=c++11'
export LINUX_FC='gfortran'
......@@ -455,7 +482,7 @@ cd ~/nco/bld;make ANTLR_ROOT=${HOME} NETCDF_ROOT='/autofs/nccs-svm1_sw/rhea/.swc
# Latest ticket: 347675 on 20170316
# Latest sysadmin-compiled module usage: module load nco/4.6.6 # 20170411
# Personal recipe last modified: 20170814
# Personal recipe last used successfully: 20170919
# Personal recipe last used successfully: 20171108
export LINUX_CC='gcc -std=c99 -pedantic -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_POSIX_SOURCE'
module add gcc # change GCC from v 4.3.4 to 4.9.0
#module add gsl # not used yet
......@@ -680,7 +707,7 @@ make check >> nco.make.${GNU_TRP}.foo 2>&1
make install >> nco.make.${GNU_TRP}.foo 2>&1
scp nco.configure.${GNU_TRP}.foo nco.config.log.${GNU_TRP}.foo nco.libtool.${GNU_TRP}.foo nco.make.${GNU_TRP}.foo dust.ess.uci.edu:Sites/nco/rgr
# gcc/g++ Zender uses this to debug NCO with custom-install netCDF (and ESMF) binaries in /usr/local (givre, roulee, grele)
# gcc/g++ Zender uses this to debug NCO with custom-install netCDF (and ESMF) binaries in /usr/local (givre, roulee, grele, skyglow)
export GNU_TRP=`~/nco/autobld/config.guess`
cd ~/nco;/bin/rm -f *.${GNU_TRP}.foo;make distclean
CC='gcc' CXX='g++' NETCDF_INC='/usr/local/include' NETCDF_LIB='/usr/local/lib' ./configure --enable-debug-custom --prefix=${HOME} --bindir=${MY_BIN_DIR} --datadir=${HOME}/nco/data --libdir=${MY_LIB_DIR} --mandir=${HOME}/nco/man > nco.configure.${GNU_TRP}.foo 2>&1
......
......@@ -629,6 +629,12 @@ variables:
char char_var_nul;
char_var_nul:long_name = "Character variable containing one NUL";
char char_var_1D_arr(time);
char_var_1D_arr:long_name = "char-type 1D variable array";
char char_var_2D_arr(lat,lon);
char_var_2D_arr:long_name = "char-type 2D variable array";
// 20131015: This confuses the XML parser. Omit it for now.
// char char_var_multinul(lev);
// char_var_multinul:long_name = "Character variable containing multiple NULs";
......@@ -1362,6 +1368,8 @@ data:
// char_var_multinul='0','\n','\0';
char_var_nul='\0';
char_var_space=" ";
char_var_1D_arr="two words";
char_var_2D_arr="one","two";
cnv_CF_grd=1,2,3,4,5,6,7,8;
cnv_CF_crd=273.1,273.2,273.3,273.4,273.5,273.6,273.7,273.8;
cnv_CF_ncl=1.,2.,3.,4.,5.,6.,7.,8.,9.,10.;
......
......@@ -390,7 +390,7 @@ lbr_vrs=$(ncks --library 2>&1 > /dev/null | awk '{print $6}')
if [ "${drc_spt}" != "${drc_nco}" ]; then
echo "WARNING: Possible mixture of NCO versions from different locations. Script ${spt_nm} is from directory ${drc_spt} while NCO binaries are from directory ${drc_nco}. Normally this script and the binaries are from the same executables directory. This WARNING may be safely ignored for customized scripts and/or binaries that the user has intentionally split into different directories."
echo "HINT (All-users): Conflicting script and binary directories may result from 1) Hardcoding an NCO script and/or binary pathnames, 2) Having incomplete NCO installations in one or more directories in the \$PATH environment variable, 3) Loading multiple NCO modules with different locations."
echo "HINT (E3SM-only): The ncclimo and ncremap scripts hard-code the NCO binary executable and library paths to point to C. Zender's home directories on the major E3SM machines. This facilitates quicker feature and fix updates to NCO binaries, and means users need only scripts to run climatologies and regridding with the latest NCO. However, this can be counterproductive for those employing carefully coordinated paths, such as MPAS Analysis or A-Prime, which use a Conda-based environment. Those users may turn-off the machine-dependent, hard-coded path by invoking ncclimo and/or ncremap with the fxm switch."
echo "HINT (E3SM-only): The ncclimo and ncremap scripts hard-code the NCO binary executable and library paths to point to C. Zender's home directories on the major E3SM machines. This facilitates quicker feature and fix updates to NCO binaries, and means users need only scripts to run climatologies and regridding with the latest NCO. However, this can be counterproductive for those employing carefully coordinated paths, such as MPAS Analysis or A-Prime, which use a Conda-based environment. Those users may turn-off the machine-dependent, hard-coded path by invoking ncclimo and/or ncremap after altering their environment with: \"export NCO_PATH_OVERRIDE=No\")."
fi # drc_spt
# When running in a terminal window (not in an non-interactive batch queue)...
......@@ -420,6 +420,7 @@ caseid_xmp='famipc5_ne30_v0.3_00003' # [sng] Case ID for examples
cf_flg='Yes' # [sng] Produce CF climatology attribute?
clm_flg='Yes' # [sng] Generate climatology
clm_md='mth' # [sng] Climatology mode ('ann', 'dly', or 'mth')
csn_flg='No' # [flg] Trigger season-processing code
csn_lst='mam,jja,son,djf' # [sng] Seasons to output (subset of mam,jja,son,djf,jfm,amj,jas,ond,fm,on)
dbg_lvl=0 # [nbr] Debugging level
dec_md='scd' # [sng] December mode ('scd' or 'sdd' as per above)
......@@ -757,7 +758,7 @@ else # !bch
fi # !stdin
fi # !bch
if [ ${inp_std} = 'Yes' ] && [ ${inp_psn} = 'Yes' ]; then
echo "${spt_nm}: ERROR expecting input both from stdin and positional command-line arguments\n"
echo "${spt_nm}: ERROR expecting input both from stdin and positional command-line arguments"
exit 1
fi # !inp_std
......@@ -874,13 +875,6 @@ if [[ "${caseid}" =~ ^(.*)([0-9][0-9][0-9][0-9][01][0-9].nc.?)$ ]]; then
out_nm=${bs_nm}
bs_sfx="${caseid#*.}"
fi # !caseid
if [ -n "${csn_lst}" ]; then
csn_flg='Yes'
if [ ${csn_lst} = 'none' ]; then
csn_nbr=0
csn_flg='No'
fi # !csn_lst
fi # !csn_lst
if [ "${clm_md}" != 'ann' ] && [ "${clm_md}" != 'dly' ] && [ "${clm_md}" != 'mth' ] ; then
echo "${spt_nm}: ERROR User-defined climatology mode is ${clm_md}. Valid options are 'ann', 'dly', or 'mth' (default)"
exit 1
......@@ -893,6 +887,11 @@ elif [ "${clm_md}" = 'dly' ]; then
dec_md='sdd'
elif [ "${clm_md}" = 'mth' ]; then
clm_nbr=12
csn_flg='Yes'
if [ ${csn_lst} = 'none' ]; then
csn_nbr=0
csn_flg='No'
fi # !csn_lst
fi # !clm_md
if [ -z "${drc_in}" ]; then
drc_in="${drc_pwd}"
......@@ -1521,8 +1520,10 @@ if [ "${clm_flg}" = 'Yes' ]; then
fi # !bnr_flg
if [ "${mdl_typ}" = 'yyyymm' ]; then
printf "Filenames will be constructed with generic conventions as ${bs_nm}_YYYYMM.${bs_sfx}\n"
elif [ ${inp_aut} = 'Yes' ]; then
printf "Input filenames will be constructed with CESM'ish or E3SM/ACME'ish conventions\n"
else # !mdl_typ
printf "Filenames will be constructed with CESM'ish or E3SM/ACME'ish conventions\n"
printf "Input filenames will be read from globbing, positional arguments, or stdin\n"
fi # !mdl_typ
if [ ${clm_md} = 'ann' ]; then
printf "Each input file assumed to contain mean of one year\n"
......@@ -1531,10 +1532,10 @@ if [ "${clm_flg}" = 'Yes' ]; then
elif [ ${clm_md} = 'mth' ]; then
printf "Each input file assumed to contain mean of one month\n"
fi # !mth
if [ ${clm_md} = 'mth' ]; then
if [ ${csn_flg} = 'No' ]; then
printf "Seasons and therefore annual means derived from seasonal means have been turned-off and will not be computed\n"
elif [ ${clm_md} = 'mth' ]; then
if [ ${dec_md} = 'scd' ]; then
elif [ ${dec_md} = 'scd' ]; then
printf "Winter statistics based on seasonally contiguous December (scd-mode): DJF sequences are consecutive and cross calendar-year boundaries\n"
else
printf "Winter statistics based on seasonally discontiguous December (sdd-mode): DJF sequences comprise three months from the same calendar year\n"
......@@ -1941,6 +1942,16 @@ if [ "${clm_flg}" = 'Yes' ] && [ "${clm_md}" = 'dly' ]; then
fl_all="${fl_all} ${fl_in[${fl_idx}]}"
done # !fl
ppn_opt="-p ${drc_in}"
if [ ${inp_std} = 'Yes' ]; then
# 20171208 Detect whether stdin names are full-path (i.e., contain directory prefix)
# If full-path, then use as-is later on, otherwise prepend drc_in
# Currently this is only done for 'dly', yet could be adapted to sbs as well
if [ "$(basename ${fl_in[0]})" != "${fl_in[0]}" ]; then
ppn_opt=''
fi # !basename
fi # !inp_std
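# Hedged illustration (comments only; filenames are placeholders) of the two stdin styles the test above distinguishes:
#   ls *.cam.h1.*.nc | ncclimo --clm_md=dly -c caseid -s 1980 -e 1980 -i /path/to/data # basenames: drc_in prepended via ppn_opt
#   ls /path/to/data/*.cam.h1.*.nc | ncclimo --clm_md=dly -c caseid -s 1980 -e 1980 # full paths: used as-is, ppn_opt cleared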
unset dpm # Days per month
declare -a dpm
dpm=(0 31 28 31 30 31 30 31 31 30 31 30 31) # 365-day calendar, 1-based indexing
......@@ -1960,10 +1971,10 @@ if [ "${clm_flg}" = 'Yes' ] && [ "${clm_md}" = 'dly' ]; then
tm_srt="${yyyy_srt}-${MM}-${DD} 00:00:00"
tm_end="${yyyy_end}-${MM}-${DD} 23:59:59"
if [ ${tpd_out} -eq 1 ]; then
cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncra -O ${nco_opt} ${gaa_sng} -d time,'${tm_srt}','${tm_end}',${srd},1 -p ${drc_in} ${fl_all} ${fl_out[${clm_idx}]}"
cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncra -O ${nco_opt} ${gaa_sng} -d time,'${tm_srt}','${tm_end}',${srd},1 ${ppn_opt} ${fl_all} ${fl_out[${clm_idx}]}"
else
# 20170619: Diurnal mode is really slow on MacOSX (UDUnits parsing sluggishness?)
cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncrcat -O ${nco_opt} ${gaa_sng} -d time,'${tm_srt}','${tm_end}',${srd},${drn} -p ${drc_in} ${fl_all} ${fl_cat[${clm_idx}]};/bin/rm -f ${fl_out[${clm_idx}]};for srt in `seq 0 $((tpd_out-1))`; do ncra --rec_apn -d time,\${srt},,${tpd_out} ${nco_opt} ${gaa_sng} ${fl_cat[${clm_idx}]} ${fl_out[${clm_idx}]};done;/bin/rm -f ${fl_cat[${clm_idx}]}"
cmd_clm[${clm_idx}]="${cmd_mpi[${clm_idx}]} ncrcat -O ${nco_opt} ${gaa_sng} -d time,'${tm_srt}','${tm_end}',${srd},${drn} ${ppn_opt} ${fl_all} ${fl_cat[${clm_idx}]};/bin/rm -f ${fl_out[${clm_idx}]};for srt in `seq 0 $((tpd_out-1))`; do ncra --rec_apn -d time,\${srt},,${tpd_out} ${nco_opt} ${gaa_sng} ${fl_cat[${clm_idx}]} ${fl_out[${clm_idx}]};done;/bin/rm -f ${fl_cat[${clm_idx}]}"
fi # !tpd
let clm_idx=${clm_idx}+1
done # !day
......
......@@ -238,13 +238,13 @@ fi # !hrd_pth && !NCO_PATH_OVERRIDE
# ncremap -P mpas -i ${DATA}/hdf/mpaso.hist.am.timeSeriesStatsMonthly.0001-01-01.nc -m ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -O ~/rgr
# ncremap -P mpas -i ${DATA}/hdf/mpascice.hist.am.timeSeriesStatsMonthly.0251-01-01.nc -m ${DATA}/maps/map_oEC60to30_to_t62_bilin.20160301.nc -O ~/rgr
# E3SM/ACME benchmarks:
# ncremap -v FSNT,AODVIS -a conserve -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -m ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc -O ~/rgr
# ncremap -v FSNT,AODVIS -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -s ${DATA}/grids/ne30np4_pentagons.091226.nc -g ${DATA}/grids/129x256_SCRIP.20150901.nc -O ~/rgr
# ncremap -v FSNT,AODVIS -a tempest -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -s ${DATA}/grids/ne30np4_pentagons.091226.nc -g ${DATA}/grids/129x256_SCRIP.20150901.nc -O ~/rgr
# ncremap -v FSNT,AODVIS -a conserve -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -m ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc -o ~/foo.nc
# ncremap -v FSNT,AODVIS -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -s ${DATA}/grids/ne30np4_pentagons.091226.nc -g ${DATA}/grids/129x256_SCRIP.20150901.nc -o ~/foo.nc
# ncremap -v FSNT,AODVIS -a tempest -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -s ${DATA}/grids/ne30np4_pentagons.091226.nc -g ${DATA}/grids/129x256_SCRIP.20150901.nc -o ~/foo.nc
# Positional arguments:
# ncremap --var=FSNT,AODVIS --map=${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc --drc_out=~/rgr ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-??.nc
# Omit cell_measures:
# ncremap --no_cll_msr --var=FSNT,AODVIS -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -m ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc -O ~/rgr
# ncremap --no_cll_msr --var=FSNT,AODVIS -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -m ${DATA}/maps/map_ne30np4_to_fv129x256_aave.20150901.nc -o ~/foo.nc
# SGS (201705):
# ncremap --vrb=3 -P sgs --var=area,FSDS,landfrac,landmask,TBOT -i ${DATA}/ne30/raw/F_acmev03_enso_camse_clm45bgc_ne30_co2cycle.clm2.h0.2000-01.nc -s ${DATA}/grids/ne30np4_pentagons.091226.nc -g ${DATA}/grids/129x256_SCRIP.20150901.nc -o ~/alm_rgr.nc # 20170510 1D->2D works conserve and bilinear, no inferral
# ncremap --vrb=3 -P sgs --var=area,FSDS,landfrac,landmask,TBOT -i ${DATA}/essgcm14/essgcm14.clm2.h0.0000-01.nc -s ${DATA}/grids/t42_SCRIP.20150901.nc -g ${DATA}/grids/129x256_SCRIP.20150901.nc -o ~/t42_rgr.nc # 20170510 2D->2D works conserve and bilinear, no inferral
......@@ -261,8 +261,12 @@ fi # !hrd_pth && !NCO_PATH_OVERRIDE
# ncremap -a conserve -i ${DATA}/hdf/ctl_brcp85c5cn_deg1.enm.cice.h.2050-07.nc -s ${DATA}/grids/gx1v7_151008.nc -g ${DATA}/grids/129x256_SCRIP.20150901.nc -o ~/wenshan.nc # 20170521: conservative requires supplied tri-pole grid for centers/bounds in masked regions
# File-format
# ncremap -v FSNT,AODVIS -s ${DATA}/grids/ne30np4_pentagons.091226.nc -i ${DATA}/ne30/raw/famipc5_ne30_v0.3_00003.cam.h0.1979-01.nc -d ${DATA}/dstmch90/dstmch90_clm.nc -o ~/foo.nc
# TempestRemap boutique:
# GenerateCSMesh --alt --res 30 --file ${DATA}/grids/ne30.g
# ncremap --dbg=1 --src_grd=${DATA}/grids/ne30.g --dst_grd=${DATA}/grids/129x256_SCRIP.20150901.nc -m ~/map_ne30np4_to_fv129x256_mono.20180101.nc -a se2fv_flx
# ncremap --dbg=1 --src_grd=${DATA}/grids/ne30.g --se_mtd=${DATA}/grids/ne30np4_latlon.091226.nc --dst_grd=${DATA}/grids/129x256_SCRIP.20150901.nc -m ~/map_ne30np4_to_fv129x256_mono.20180101.nc -a se2fv_flx
# Debugging and Benchmarking:
# ncremap -D 1 -i ${DATA}/sld/raw/AIRS.2014.10.01.202.L2.TSurfStd.Regrid010.1DLatLon.hole.nc -d ${DATA}/dstmch90/dstmch90_clm.nc -O ~/rgr > ~/ncremap.out 2>&1 &
# ncremap -D 1 -i ${DATA}/sld/raw/AIRS.2014.10.01.202.L2.TSurfStd.Regrid010.1DLatLon.hole.nc -d ${DATA}/dstmch90/dstmch90_clm.nc -o ~/foo.nc > ~/ncremap.out 2>&1 &
# dbg_lvl: 0 = Quiet, print basic status during evaluation
# 1 = Print configuration, full commands, and status to output during evaluation
......@@ -309,7 +313,7 @@ fi # !TERM
# Defaults for command-line options and some derived variables
# Modify these defaults to save typing later
alg_typ='bilinear' # [nbr] Algorithm for interpolation (bilinear|conserve|nearestdtos|neareststod|patch|tempest)
alg_typ='bilinear' # [nbr] Algorithm for remapping (bilinear|conserve|nearestdtos|neareststod|patch|tempest|se2fv_flx|se2fv_stt|se2fv_alt|fv2se_flx|fv2se_stt|fv2se_alt)
bch_pbs='No' # [sng] PBS batch (non-interactive) job
bch_slr='No' # [sng] SLURM batch (non-interactive) job
cln_flg='Yes' # [flg] Clean-up (remove) intermediate files before exiting
......@@ -326,6 +330,7 @@ dst_xmp='dst.nc' # [sng] Destination file for examples
fl_fmt='' # [enm] Output file format
fl_nbr=0 # [nbr] Number of files to remap
gaa_sng="--gaa remap_script=${spt_nm} --gaa remap_command=\"'${cmd_ln}'\" --gaa remap_hostname=${HOSTNAME} --gaa remap_version=${nco_vrs}" # [sng] Global attributes to add
gll_fl='' # [sng] GLL grid metadata (geometry+connectivity+Jacobian) file
grd_dst='' # [sng] Destination grid-file
grd_dst_xmp='grd_dst.nc' # [sng] Destination grid-file for examples
grd_sng='' # [sng] Grid string
......@@ -380,7 +385,7 @@ vrb_4=4 # [enm] Verbosity level: Pedantic
vrs_prn='No' # [sng] Print version information
wgt_exe_esmf='ESMF_RegridWeightGen' # [sng] ESMF executable
wgt_exe_tps='GenerateOfflineMap' # [sng] TempestRemap executable
wgt_typ='esmf' # [sng] Weight-generator program
wgt_typ='esmf' # [sng] Weight-generator program ('esmf' or 'tempest')
wgt_opt='' # [sng] Weight-generator options
#wgt_opt_esmf='--ignore_unmapped --netcdf4' # [sng] ESMF_RegridWeightGen options
wgt_opt_esmf='--ignore_unmapped' # [sng] ESMF_RegridWeightGen options
......@@ -409,11 +414,14 @@ function fnc_usg_prn { # NB: dash supports fnc_nm (){} syntax, not function fnc_
echo "${fnt_rvr}-6${fnt_nrm} Output file format 64BIT_OFFSET (netCDF3 64bit CDF2) [${fnt_tlc}fl_fmt, file_format=64bit_offset${fnt_nrm}]"
echo "${fnt_rvr}-7${fnt_nrm} Output file format NETCDF4_CLASSIC (netCDF4 classic) [${fnt_tlc}fl_fmt, file_format=netcdf4_classic${fnt_nrm}]"
echo "${fnt_rvr}-a${fnt_nrm} ${fnt_bld}alg_typ${fnt_nrm} Algorithm for weight generation (default ${fnt_bld}${alg_typ}${fnt_nrm}) [${fnt_tlc}alg_typ, algorithm, regrid_algorithm${fnt_nrm}]"
echo " ESMF algorithms: bilinear|conserve|nearestdtos|neareststod|patch|tempest|se2fv_flx|se2fv_stt|se2fv_alt|fv2se_flx|fv2se_stt|fv2se_alt"
echo " Tempest algorithms: tempest|se2fv_flx|se2fv_stt|se2fv_alt|fv2se_flx|fv2se_stt|fv2se_alt"
echo "${fnt_rvr}-d${fnt_nrm} ${fnt_bld}dst_fl${fnt_nrm} Data file to infer destination grid from (empty means none, i.e., use grd_fl, grd_sng or map_fl)) (default ${fnt_bld}${dst_fl}${fnt_nrm}) [${fnt_tlc}dst_fl, destination_file, template_file, template${fnt_nrm}]"
echo "${fnt_rvr}-D${fnt_nrm} ${fnt_bld}dbg_lvl${fnt_nrm} Debug level (default ${fnt_bld}${dbg_lvl}${fnt_nrm}) [${fnt_tlc}dbg_lvl, dbg, debug, debug_level${fnt_nrm}]"
echo " ${fnt_bld}--fl_fmt${fnt_nrm} File format (empty is netCDF3 64bit CDF2) (default ${fnt_bld}${fl_fmt}${fnt_nrm}) [${fnt_tlc}fl_fmt, fmt_out, file_format, format_out${fnt_nrm}]"
echo "${fnt_rvr}-G${fnt_nrm} ${fnt_bld}grd_sng${fnt_nrm} Grid generation argument string (empty means none) (default ${fnt_bld}${grd_sng}${fnt_nrm}) [${fnt_tlc}grd_sng, grid_generation, grid_gen, grid_string${fnt_nrm}]"
echo "${fnt_rvr}-g${fnt_nrm} ${fnt_bld}grd_dst${fnt_nrm} Grid-file (destination) (empty means none, i.e., infer from dst_fl or use grd_sng or map_fl) (default ${fnt_bld}${grd_dst}${fnt_nrm}) [${fnt_tlc}grd_dst, grid_dest, dest_grid, destination_grid${fnt_nrm}]"
echo "${fnt_rvr}-g${fnt_nrm} ${fnt_bld}grd_dst${fnt_nrm} Grid-file (destination) (empty means none, i.e., infer from dst_fl or use grd_sng or map_fl) (default ${fnt_bld}${grd_dst}${fnt_nrm}) [${fnt_tlc}grd_dst, grid_dest, dst_grd, dest_grid, destination_grid${fnt_nrm}]"
echo " ${fnt_bld}--gll_fl${fnt_nrm} GLL metadata (SE grid geometry+connectivity+Jacobian) file (default ${fnt_bld}${gll_fl}${fnt_nrm}) [${fnt_tlc}gll_fl, gll_mtd, se_gmt, se_mtd${fnt_nrm}]"
echo "${fnt_rvr}-I${fnt_nrm} ${fnt_bld}drc_in${fnt_nrm} Input directory (empty means none) (default ${fnt_bld}${drc_in}${fnt_nrm}) [${fnt_tlc}drc_in, in_drc, dir_in, in_dir, input${fnt_nrm}]"
echo "${fnt_rvr}-i${fnt_nrm} ${fnt_bld}in_fl${fnt_nrm} Input file (empty means pipe to stdin or drc_in) (default ${fnt_bld}${in_fl}${fnt_nrm}) [${fnt_tlc}in_fl, in_file, input_file${fnt_nrm}]"
echo "${fnt_rvr}-j${fnt_nrm} ${fnt_bld}job_nbr${fnt_nrm} Job simultaneity for parallelism (default ${fnt_bld}${job_nbr}${fnt_nrm}) [${fnt_tlc}job_nbr, job_number, jobs${fnt_nrm}]"
......@@ -543,10 +551,11 @@ while getopts :34567a:CD:d:f:g:G:h:I:i:j:L:Mm:n:O:o:P:p:R:r:s:T:t:Uu:V:v:W:w:x:-
dbg_lvl=?* | dbg=?* | debug=?* | debug_level=?* ) dbg_lvl="${LONG_OPTARG}" ;; # -d # Debugging level
dfl_lvl=?* | deflate=?* | dfl=?* ) dfl_lvl="${LONG_OPTARG}" ;; # -L # Deflate level
dst_fl=?* | destination_file=?* | template_file=?* | template=?* ) dst_fl="${LONG_OPTARG}" ;; # -d # Destination file
grd_dst=?* | grid_dest=?* | dest_grid=?* | destination_grid=?* ) grd_dst="${LONG_OPTARG}" ;; # -g # Destination grid-file
grd_dst=?* | grid_dest=?* | dst_grd=?* | dest_grid=?* | destination_grid=?* ) grd_dst="${LONG_OPTARG}" ;; # -g # Destination grid-file
grd_sng=?* | grid_generation=?* | grid_gen=?* | grid_string=?* ) grd_sng="${LONG_OPTARG}" ;; # -G # Grid generation string
drc_in=?* | in_drc=?* | dir_in=?* | in_dir=?* | input=?* ) drc_in="${LONG_OPTARG}" ;; # -i # Input directory
fl_fmt=?* | fmt_out=?* | file_format=?* | format_out=?* ) fl_fmt="${LONG_OPTARG}" ;; # # Output file format
gll_fl=?* | gll_mtd=?* | se_gmt=?* | se_mtd=?* ) gll_fl="${LONG_OPTARG}" ;; # # GLL grid metadata (geometry+connectivity+Jacobian) file
in_fl=?* | in_file=?* | input_file=?* ) in_fl="${LONG_OPTARG}" ;; # -i # Input file
job_nbr=?* | job_number=?* | jobs=?* ) job_usr="${LONG_OPTARG}" ;; # -j # Job simultaneity
mlt_map | multimap | no_multimap | nomultimap ) mlt_map_flg='No' ;; # -M # Multi-map flag
......@@ -737,7 +746,7 @@ pdq_fl=${pdq_fl}${unq_sfx}
tmp_out_fl=${tmp_out_fl}${unq_sfx}
znl_fl=${znl_fl}${unq_sfx}
# Algorithm options are bilinear|conserve|nearestdtos|neareststod|patch|tempest
# Algorithm options are bilinear|conserve|nearestdtos|neareststod|patch|tempest|se2fv_flx|se2fv_stt|se2fv_alt|fv2se_flx|fv2se_stt|fv2se_alt
if [ ${alg_typ} = 'bilinear' ] || [ ${alg_typ} = 'bln' ]; then
alg_opt='bilinear'
elif [ ${alg_typ} = 'conserve' ] || [ ${alg_typ} = 'conservative' ] || [ ${alg_typ} = 'cns' ]; then
......@@ -749,10 +758,54 @@ elif [ ${alg_typ} = 'neareststod' ] || [ ${alg_typ} = 'nsd' ] || [ ${alg_typ} =
elif [ ${alg_typ} = 'patch' ] || [ ${alg_typ} = 'pch' ] || [ ${alg_typ} = 'ptc' ]; then
alg_opt='patch'
elif [ ${alg_typ} = 'tempest' ] || [ ${alg_typ} = 'tps' ] || [ ${alg_typ} = 'tmp' ]; then
# 20171108 'tempest' invokes TempestRemap with no automatic options, suitable for RLL re-mapping?
# 20171108 TempestRemap boutique options based on particular remapping type
# https://acme-climate.atlassian.net/wiki/spaces/Docs/pages/178848194/Transition+to+TempestRemap+for+Atmosphere+grids
# map_tag in comments is for E3SM naming convention map_src_to_dst_${map_tag}.20180101.nc
alg_opt='tempest'
wgt_typ='tempest'
elif [ ${alg_typ} = 'se2fv_flx' ] || [ ${alg_typ} = 'mono_se2fv' ] || [ ${alg_typ} = 'conservative_monotone_se2fv' ]; then # map_tag='mono'
wgt_opt_tps="--in_type cgll --in_np 4 --out_type fv --out_double --mono"
if [ -n "${gll_fl}" ]; then
wgt_opt_tps="--in_meta ${gll_fl} ${wgt_opt_tps}"
fi # !gll_fl
wgt_typ='tempest'
elif [ ${alg_typ} = 'se2fv_stt' ] || [ ${alg_typ} = 'highorder_se2fv' ] || [ ${alg_typ} = 'accurate_conservative_nonmonotone_se2fv' ]; then # map_tag='highorder'
wgt_opt_tps="--in_type cgll --in_np 4 --out_type fv --out_double"
if [ -n "${gll_fl}" ]; then
wgt_opt_tps="--in_meta ${gll_fl} ${wgt_opt_tps}"
fi # !gll_fl
wgt_typ='tempest'
elif [ ${alg_typ} = 'se2fv_alt' ] || [ ${alg_typ} = 'intbilin_se2fv' ] || [ ${alg_typ} = 'accurate_monotone_nonconservative_se2fv' ]; then # map_tag='intbilin'
wgt_opt_tps="--in_type cgll --in_np 4 --out_type fv --out_double --mono3 --noconserve"
if [ -n "${gll_fl}" ]; then
wgt_opt_tps="--in_meta ${gll_fl} ${wgt_opt_tps}"
fi # !gll_fl
wgt_typ='tempest'
elif [ ${alg_typ} = 'fv2se_flx' ] || [ ${alg_typ} = 'monotr_fv2se' ] || [ ${alg_typ} = 'conservative_monotone_fv2se' ]; then # map_tag='monotr'
wgt_opt_tps="--in_type cgll --in_np 4 --out_type fv --out_double --mono"
if [ -n "${gll_fl}" ]; then
wgt_opt_tps="--in_meta ${gll_fl} ${wgt_opt_tps}"
fi # !gll_fl
wgt_typ='tempest'
# fxm: transpose maps need additional call:
# ./GenerateTransposeMap --in map_atm2ocn_mono.nc --out map_ocn2atm_monotr.nc
elif [ ${alg_typ} = 'fv2se_stt' ] || [ ${alg_typ} = 'highorder_fv2se' ] || [ ${alg_typ} = 'accurate_conservative_nonmonotone_fv2se' ]; then # map_tag='highorder'
wgt_opt_tps="--in_type fv --in_np 2 --out_type cgll --out_np 4 --out_double --volumetric"
if [ -n "${gll_fl}" ]; then
wgt_opt_tps="${wgt_opt_tps} --out_meta ${gll_fl}"
fi # !gll_fl
wgt_typ='tempest'
elif [ ${alg_typ} = 'fv2se_alt' ] || [ ${alg_typ} = 'mono_fv2se' ] || [ ${alg_typ} = 'conservative_monotone_fv2se_alt' ]; then # map_tag='mono'
wgt_opt_tps="--in_type fv --in_np 1 --out_type cgll --out_np 4 --out_double --mono --volumetric"
if [ -n "${gll_fl}" ]; then
wgt_opt_tps="${wgt_opt_tps} --out_meta ${gll_fl}"
fi # !gll_fl
wgt_typ='tempest'
else
echo "${spt_nm}: ERROR ${alg_typ} is not a valid interpolation option\n"
echo "${spt_nm}: HINT Valid interpolation options are bilinear|conserve|nearestdtos|neareststod|patch|tempest\n"
echo "${spt_nm}: ERROR ${alg_typ} is not a valid remapping algorithm\n"
echo "${spt_nm}: HINT Valid ESMF remapping algorithms are bilinear,bln | conserve,cns | nearestdtos,nds,dtos | neareststod,nsd,stod | patch,pch\n"
echo "${spt_nm}: HINT Valid TempestRemap remapping options are tempest | se2fv_flx,mono_se2fv | se2fv_stt,highorder_se2fv | se2fv_alt,intbilin | fv2se_flx,monotr_fv2se | fv2se_stt,highorder_fv2se | fv2se_alt,mono_fv2se\n"
exit 1
fi # !alg_typ
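# Hedged illustration (comments only): alg_typ=se2fv_flx with --gll_fl=ne30np4_latlon.091226.nc yields
#   wgt_typ='tempest'
#   wgt_opt_tps='--in_meta ne30np4_latlon.091226.nc --in_type cgll --in_np 4 --out_type fv --out_double --mono'
# which ncremap later passes to the TempestRemap weight-generator executable (GenerateOfflineMap)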
if [ -n "${fl_fmt}" ]; then
......@@ -777,9 +830,6 @@ if [ -n "${fl_fmt}" ]; then
fi # !fl_fmt
nco_opt="${nco_opt} ${nco_fl_fmt}"
fi # !fl_fmt
if [ ${alg_opt} = 'tempest' ]; then
wgt_typ='tempest'
fi # alg_opt
if [ ${wgt_typ} = 'esmf' ]; then
wgt_cmd="${wgt_exe_esmf}"
wgt_exe="${wgt_exe_esmf}"
......@@ -1159,6 +1209,7 @@ if [ ${dbg_lvl} -ge 2 ]; then
printf "dbg: dst_fl = ${dst_fl}\n"
printf "dbg: fl_fmt = ${fl_fmt}\n"
printf "dbg: gaa_sng = ${gaa_sng}\n"
printf "dbg: gll_fl = ${gll_fl}\n"
printf "dbg: grd_dst = ${grd_dst}\n"
printf "dbg: grd_sng = ${grd_sng}\n"
printf "dbg: grd_src = ${grd_src}\n"
......
nco (4.7.1-1~bpo9+1) stretch-backports; urgency=medium
* Rebuild for stretch-backports.
-- Bas Couwenberg <sebastic@debian.org> Sun, 07 Jan 2018 11:52:47 +0100
nco (4.7.1-1) unstable; urgency=medium
* New upstream release.
* Bump Standards-Version to 4.1.2, no changes.
-- Bas Couwenberg <sebastic@debian.org> Thu, 21 Dec 2017 18:29:08 +0100
nco (4.7.0-1~bpo9+1) stretch-backports; urgency=medium
* Rebuild for stretch-backports.
......
......@@ -18,7 +18,7 @@ Build-Depends: debhelper (>= 9),
libudunits2-dev,
libdap-dev,
texinfo
Standards-Version: 4.1.1
Standards-Version: 4.1.2
Vcs-Browser: https://anonscm.debian.org/cgit/pkg-grass/nco.git
Vcs-Git: https://anonscm.debian.org/git/pkg-grass/nco.git -b stretch-backports
Homepage: http://nco.sourceforge.net/
......
$Header$ -*-text-*-
The netCDF Operators NCO version 4.7.0 are released.
The netCDF Operators NCO version 4.7.1 have escaped.
http://nco.sf.net (Homepage, Mailing lists)
http://nco.sf.net (Homepage, Mailing lists, Help)
http://github.com/nco (Source Code, Releases, Developers)
What's new?
Version 4.7.0 adds spit and polish throughout the toolkit.
Nothing major stands out, except perhaps for easy access
to compression algorithms in ncclimo and ncremap.
Version 4.7.1 contains mostly minor fixes and a few new features.
One major improvement, long in development, is a Conda Windows port.
Most NCO functionality is now easily available to Windows users
through the Conda distribution.
Pedro Vicente turned his Windows scripts into a CMake implementation.
Building on Pedro's CMake contribution, a team of about six volunteers
used, refined, or implemented Windows ports for the full NCO software
stack. Filipe Fernandez coordinated the Conda work, added AppVeyor
support, and brought in extra help and testing. Pedro and Isuru
Fernando tracked down bugs and improved the build matrix.
No one person could have accomplished this.
The power of Open Source development is awesome.
Work on NCO 4.7.1 has commenced. Planned changes include
better diagnosis and workarounds for the netCDF CDF5 bug,
and parallel weight generation by ncremap.
parallel weight generation by ncremap, and possibly workarounds for
using quotation marks with ncap2 in Windows.
Enjoy,
Charlie
NEW FEATURES (full details always in ChangeLog):
A. ncclimo and ncremap accept fl_fmt options for output file format.
The option behaves the same as it does for other operators:
ncclimo [-3 -4 -5 -6 -7] ...
ncremap [-3 -4 -5 -6 -7] ...
ncclimo --fl_fmt=netcdf4 ...
ncremap --fl_fmt=cdf5 ...
http://nco.sf.net/nco.html#fl_fmt
http://nco.sf.net/nco.html#autoconversion
B. ncclimo and ncremap accept the dfl_lvl option for compression.
The value of dfl_lvl is an integer from 0..9 that specifies the
DEFLATE (= Lempel-Ziv + Huffman) compression level for netCDF4.
The option behaves the same as it does for other operators:
ncclimo --dfl_lvl=3 ...
ncremap --dfl_lvl=3 ...
ncclimo -L 3 ...
ncremap -L 3 ...
http://nco.sf.net/nco.html#deflate
C. ncclimo accepts the ppc option for Precision-Preserving Compression.
Other operators allow multiple, per-variable PPC specification.
ncclimo allows only one PPC option, equivalent to specifying
--ppc default=<val> with the compiled operators.
NB: regridding does not yet accept PPC.
ncclimo --ppc=3 ...
http://nco.sf.net/nco.html#ppc
http://nco.sf.net/nco.html#bitgrooming
D. ncrename is now quiet by default, like all other operators.
The old summary line that tallied the numbers of things renamed
can still be seen by invoking ncrename with -D 1.
http://nco.sf.net#ncrename
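An illustrative invocation (the variable names are placeholders):
ncrename -D 1 -v old_name,new_name in.nc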
E. CMake: Pedro Vicente contributed a CMake build-engine in 4.6.9.
The CMake engine now correctly links ncap2 to intrinsic math
functions like erf(), gamma(), ...
Please give us feedback on any wrinkles in the CMake build.
To build with CMake and install in /usr/local:
cd nco/cmake
cmake .. -DCMAKE_INSTALL_PREFIX=/usr/local
make
sudo make install
Additional examples in cmake/build.bat
http://nco.sf.net#bld
F. The automatic CDF5 bug checker in 4.6.9 has been turned-off.
It produced too much distracting output in workflows affected
by the CDF5 bug. To activate this check, build NCO with, e.g.,
CPPFLAGS='-DNCO_CDF5_BUG_CHK' ./configure ...
The checker itself has been improved so that, when turned on,
operators are now quiet when sniffing CDF5 files for corruption,
until they detect a "large" (> 4 GB) variable.
In 4.6.9 the sniffing algorithm was noisier, and printed an INFO
upon closing any CDF5 file.
http://nco.sf.net#bug_cdf5
A. NCO Conda package for Windows: All NCO binaries are thought
to work correctly, although the regression test script itself
(which is written in Perl) has not yet been run on Windows.
Windows' idiosyncratic quoting rules can interfere with ncap2
(try running from Powershell to use UNIXy quotes).
The two scripted operators, ncclimo and ncremap, are problematic.
As sophisticated Bash scripts, these do not work on Windows.
Volunteers to port them to a Windows-friendly language (e.g.,
Python) are welcome!
Within a few days of release, try this Windows install command:
conda install -c conda-forge nco
http://nco.sf.net/nco.html#conda
B. ncclimo in daily mode now handles stdin input more cleverly.
Piping or redirecting filename lists to ncclimo --clm_md=dly
is the easiest way to feed huge lists of filenames to ncclimo.
However, these lists are often dominated in size by the path
component of the filename, rather than the filename itself.
Now ncclimo allows specification of the path with -i drc_in,
and it will prepend that path to the standard input names.
Previously it prepended the current working directory.
This functionality allows easy retention of the full
provenance of the input files, without requiring repetitive
inclusion of long pathnames in the filenames.
ls *.clm2.h0.* | ncclimo -c caseid -C dly -s 1960 -e 2005 -i data
http://nco.sf.net/nco.html#ncclimo
C. JSON printing of netCDF3 strings, i.e., character arrays,
has changed. Previously ncks printed square brackets around
the string, as with numeric arrays. However this was somewhat
redundant since the quotation marks around a JSON string already
indicate an array of characters. Now ncks omits the inner-most
(i.e., most rapidly varying) set of brackets from the printing of
character arrays:
ncks --jsn_fmt=2 -C -H -v char_var_2D_arr ~/nco/data/in.nc
{
"dimensions": {
"lat": 2,
"lon": 4
},
"variables": {
"char_var_2D_arr": {
"shape": ["lat", "lon"],
"type": "char",
"data": ["one", "two"]
...
This is instead of "data": [["one"], ["two"]]
in the last line.
http://nco.sf.net/nco.html#json
BUG FIXES:
A. Fix bug where ncra and ncrcat treated negative record hyperslab
indices with the old (pre-2014) convention, and were off-by-one
relative to the 2014+ Python-based convention. Previously
"ncrcat -d time,-1" extracted the penultimate record, now it
extracts the last. This bug only affected the record dimension
of the multi-file operators ncra and ncrcat.
A. Fix bug where ncclimo --clm_md=dly/ann expected a season list.
This bug was inadvertently introduced with season code in 4.6.8.
The workaround is to use NCO <= 4.6.7 or upgrade to 4.7.1.
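An illustrative annual-mode command that works again with this fix
(caseid, years, and directories are placeholders):
ncclimo --clm_md=ann -c caseid -s 1980 -e 1989 -i drc_in -o drc_out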
B. ncks in JSON, CDL, and XML modes now prints a single NUL as "".
Previously a scalar NUL printed as "0", as in ncdump's CDL.
However, we think this is a bug in ncdump, since the NUL character is not the same as the digit '0'.
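A hedged illustration using the char_var_nul variable in nco/data/in.cdl:
ncks --jsn_fmt=2 -C -H -v char_var_nul ~/nco/data/in.nc
should now print the data value as "" rather than "0".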
Full release statement at http://nco.sf.net/ANNOUNCE
......@@ -90,9 +97,9 @@ KNOWN PROBLEMS DUE TO NCO:
This section of ANNOUNCE reports and reminds users of the
existence and severity of known, not yet fixed, problems.
These problems occur with NCO 4.7.0 built/tested under
MacOS 10.12.6 with netCDF 4.4.1.1 on HDF5 1.10.1 and with
Linux with netCDF 4.5.1-development (201701101) on HDF5 1.8.19.
These problems occur with NCO 4.7.1 built/tested under
MacOS 10.13.2 with netCDF 4.4.1.1 on HDF5 1.10.1 and with
Linux with netCDF 4.5.1-development (20171220) on HDF5 1.8.19.
A. NOT YET FIXED (NCO problem)
Correctly read arrays of NC_STRING with embedded delimiters in ncatted arguments
......@@ -149,7 +156,7 @@ B. NOT YET FIXED (netCDF4 library bug)
20170323: Verified problem still exists with netCDF 4.4.2-development
20170323: https://github.com/Unidata/netcdf-c/issues/381
20171102: Verified problem still exists with netCDF 4.5.1-development
20171107: https://github.com/Unidata/netcdf-c/issues/597
Bug tracking: https://www.unidata.ucar.edu/jira/browse/fxm
More details: http://nco.sf.net/nco.html#ncrename_crd
......@@ -162,6 +169,7 @@ C. NOT YET FIXED (netCDF4 library bug)
20170323: Confirmed problem reported by Paolo Oliveri, reported to Unidata
20170323: https://github.com/Unidata/netcdf-c/issues/381
20171102: Verified problem still exists with netCDF 4.5.1-development
20171107: https://github.com/Unidata/netcdf-c/issues/597
Bug tracking: https://www.unidata.ucar.edu/jira/browse/fxm
More details: http://nco.sf.net/nco.html#ncrename_crd
......
2017-12-21 Charlie Zender <zender@uci.edu>
* NCO 4.7.1 release procedure:
cd ~/nco;git commit -a -m 'Version 4.7.1: Ajudar';git push
git tag -a 4.7.1 -m 'WIN64 ABI; Conda Windows; JSON tweaks; fix clm_md=dly,ann;';git push --tags
2017-12-20 Charlie Zender <zender@uci.edu>
* NCO 4.7.1-beta01 release procedure:
cd ~/nco;git commit -a -m 'Version 4.7.1-beta01: Neverwhere';git push
git tag -a 4.7.1-beta01 -m 'JSON NUL, brackets, Conda Windows';git push --tags
* Fix Travis build matrix to eliminate false negatives (thanks to @isuruf)
* Print scalar char of NUL as "" instead of 0 in JSON, CDL, XML
* Remove inner set of brackets when printing character arrays in JSON
2017-12-18 Charlie Zender <zender@uci.edu>
* Add detection of netcdf_mem.h to autoconf and #define HAVE_NETCDF_MEM_H accordingly
2017-12-16 Charlie Zender <zender@uci.edu>
* Add AppVeyor to build conda recipe for Windows. Contributed by Filipe.
2017-12-14 Charlie Zender <zender@uci.edu>
* CMakefile.txt and nco_sng_utl.h changes for Conda Windows package
2017-12-08 Charlie Zender <zender@uci.edu>
* NCO 4.7.1-alpha03 release procedure:
cd ~/nco;git commit -a -m 'Version 4.7.1-alpha03: Red Velvet';git push
git tag -a 4.7.1-alpha03 -m 'ncclimo fix clm_md=dly,ann bugs and add prepend savvy';git push --tags
* ncclimo: for clm_md=dly and inp_std, prepend drc_in to fl_in iff stdin names are basenames
2017-12-07 Charlie Zender <zender@uci.edu>
* Stop ncclimo from setting csn_flg when clm_md='dly' or 'ann'
* Fix ncclimo E3SM instruction on how to override default PATH at HPCs
2017-12-03 Charlie Zender <zender@uci.edu>
* Warn when input file contains non-atomic variables
2017-11-15 Charlie Zender <zender@uci.edu>
* NCO 4.7.1-alpha02 release procedure:
cd ~/nco;git commit -a -m 'Version 4.7.1-alpha02: Raspberry-infused vodka';git push
git tag -a 4.7.1-alpha02 -m 'long/size_t workaround for WIN64';git push --tags
* long/size_t workaround for var1 routines
* Change most NC_MAX_DIMS to NC_MAX_VAR_DIMS
* long/size_t workaround for srt,cnt and long/ptrdiff_t workaround for srd,map
* WIN64 workaround for sizeof(long) = 4 != 8 = sizeof(size_t) with dmn_sz, att_sz
2017-11-12 Charlie Zender <zender@uci.edu>
* Modify how netcdf_mem.h and nc_open_mem() are detected/provided
2017-11-10 Charlie Zender <zender@uci.edu>
* ncremap: Use gll_fl only when supplied, do not require it
* ncremap: Implement all boutique Tempest algorithms except for transpose
* ncrename: report collective rename bugs in https://github.com/Unidata/netcdf-c/issues/597
2017-11-08 Charlie Zender <zender@uci.edu>
 
* ncremap: Add hooks for more elaborate TempestRemap options
* NCO 4.7.1-alpha01 release procedure:
cd ~/nco;git commit -a -m 'Version 4.7.1-alpha01: 1.3 lb salad';git push
git tag -a 4.7.1-alpha01 -m 'Identical to 4.7.0 except for version info';git push --tags
* NCO 4.7.0 release procedure:
cd ~/nco;git commit -a -m 'Version 4.7.0: Ta Nehisi';git push
git tag -a 4.7.0 -m 'ncremap/ncclimo dfl_lvl, fl_fmt;ncclimo ppc;nco_map/msh_mk();ncra/ncrcat neg hyp;quiet CDF5, ncrename';git push --tags
......