/* Pick a chunk length for each dimension, if one has not already * been picked above. */ for (d = 0; d < var->ndims; d++) if (!var->chunksizes[d]) { size_t suggested_size; suggested_size = (pow((double)DEFAULT_CHUNK_SIZE/(num_values * type_size), 1/(double)(var->ndims - num_set)) * var->dim[d]->len - .5); if (suggested_size > var->dim[d]->len) suggested_size = var->dim[d]->len; var->chunksizes[d] = suggested_size ? suggested_size : 1; LOG((4, "nc_def_var_nc4: name %s dim %d DEFAULT_CHUNK_SIZE %d num_values %f type_size %d " "chunksize %ld", var->name, d, DEFAULT_CHUNK_SIZE, num_values, type_size, var->chunksizes[d])); } /* But did this add up to a chunk that is too big? */ retval = check_chunksizes(grp, var, var->chunksizes); if (retval) { /* Other error? */ if (retval != NC_EBADCHUNK) return retval; /* Chunk is too big! Reduce each dimension by half and try again. */ for ( ; retval == NC_EBADCHUNK; retval = check_chunksizes(grp, var, var->chunksizes)) for (d = 0; d < var->ndims; d++) var->chunksizes[d] = var->chunksizes[d]/2 ? var->chunksizes[d]/2 : 1; } /* Do we have any big data overhangs? They can be dangerous to * babies, the elderly, or confused campers who have had too much * beer. */ #define NC_ALLOWED_OVERHANG .1 for (d = 0; d < var->ndims; d++) for ( ; var->dim[d]->len % var->chunksizes[d] > var->dim[d]->len * NC_ALLOWED_OVERHANG; ) var->chunksizes[d] -= var->dim[d]->len * NC_ALLOWED_OVERHANG;
/* Define chunking for a variable. This must be done after nc_def_var
 * and before nc_enddef.
 *
 *   ncid        - file (or group) ID the variable belongs to
 *   varid       - ID of the variable whose chunking is being set
 *   storage     - storage-layout flag (presumably the library's
 *                 chunked/contiguous constants — confirm against the
 *                 header's NC_* storage defines)
 *   chunksizesp - array of chunk lengths, one entry per dimension of
 *                 the variable; not modified (const)
 *
 * Returns an int error code (0 on success, by library convention). */
EXTERNL int
nc_def_var_chunking(int ncid, int varid, int storage, const size_t *chunksizesp);

/* Inq chunking stuff for a var.
 *
 *   ncid        - file (or group) ID the variable belongs to
 *   varid       - ID of the variable being inquired about
 *   storagep    - out: receives the storage-layout flag; NULL output
 *                 pointers are conventionally ignored in this API —
 *                 TODO confirm for this function
 *   chunksizesp - out: receives the per-dimension chunk lengths
 *
 * Returns an int error code (0 on success, by library convention). */
EXTERNL int
nc_inq_var_chunking(int ncid, int varid, int *storagep, size_t *chunksizesp);