diff --git a/fiona/_shim1.pxd b/fiona/_shim1.pxd index 4b2bf7ff1..a1b998a8c 100644 --- a/fiona/_shim1.pxd +++ b/fiona/_shim1.pxd @@ -23,7 +23,8 @@ cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef (int, int, int) get_proj_version() - +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField) from fiona._shim cimport OGR_F_GetFieldAsInteger as OGR_F_GetFieldAsInteger64 from fiona._shim cimport OGR_F_SetFieldInteger as OGR_F_SetFieldInteger64 diff --git a/fiona/_shim1.pyx b/fiona/_shim1.pyx index 3afd86a5c..4e6514d8a 100644 --- a/fiona/_shim1.pyx +++ b/fiona/_shim1.pyx @@ -136,3 +136,23 @@ cdef void set_proj_search_path(object path): cdef (int, int, int) get_proj_version(): return (-1, -1, -1) + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + cdef int nSecond + nSecond = int(fSecond) + OGR_F_SetFieldDateTime(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, nSecond, nTZFlag) + + +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField): + cdef int retval + cdef int nYear = 0 + cdef int nMonth = 0 + cdef int nDay = 0 + cdef int nHour = 0 + cdef int nMinute = 0 + cdef int nSecond = 0 + cdef int nTZFlag = 0 + + retval = OGR_F_GetFieldAsDateTime(cogr_feature, iField, &nYear, &nMonth, &nDay, &nHour, &nMinute, &nSecond, &nTZFlag) + return (retval, nYear, nMonth, nDay, nHour, nMinute, float(nSecond), nTZFlag) diff --git a/fiona/_shim2.pxd b/fiona/_shim2.pxd index 76c66dbe4..c46505f0c 100644 --- a/fiona/_shim2.pxd +++ b/fiona/_shim2.pxd @@ -16,3 +16,5 @@ cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef (int, int, int) get_proj_version() +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField) diff --git a/fiona/_shim2.pyx b/fiona/_shim2.pyx index 0ee829e8b..414e5614a 100644 --- a/fiona/_shim2.pyx +++ b/fiona/_shim2.pyx @@ -138,3 +138,22 @@ cdef void set_proj_search_path(object path): cdef (int, int, int) get_proj_version(): return (-1, -1, -1) + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) + + +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField): + cdef int retval + cdef int nYear = 0 + cdef int nMonth = 0 + cdef int nDay = 0 + cdef int nHour = 0 + cdef int nMinute = 0 + cdef float fSecond = 0.0 + cdef int nTZFlag = 0 + + retval = OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, &nYear, &nMonth, &nDay, &nHour, &nMinute, &fSecond, &nTZFlag) + + return (retval, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff --git a/fiona/_shim22.pxd b/fiona/_shim22.pxd index 0deaab3f1..33b56cba8 100644 --- a/fiona/_shim22.pxd +++ b/fiona/_shim22.pxd @@ 
-16,3 +16,5 @@ cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef (int, int, int) get_proj_version() +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField) diff --git a/fiona/_shim22.pyx b/fiona/_shim22.pyx index 50aff7696..dec266aaa 100644 --- a/fiona/_shim22.pyx +++ b/fiona/_shim22.pyx @@ -148,3 +148,22 @@ cdef void set_proj_search_path(object path): cdef (int, int, int) get_proj_version(): return (-1, -1, -1) + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) + + +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField): + cdef int retval + cdef int nYear = 0 + cdef int nMonth = 0 + cdef int nDay = 0 + cdef int nHour = 0 + cdef int nMinute = 0 + cdef float fSecond = 0.0 + cdef int nTZFlag = 0 + + retval = OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, &nYear, &nMonth, &nDay, &nHour, &nMinute, &fSecond, &nTZFlag) + + return (retval, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff --git a/fiona/_shim3.pxd b/fiona/_shim3.pxd index 61724ec72..7927bb2cc 100644 --- a/fiona/_shim3.pxd +++ b/fiona/_shim3.pxd @@ -16,3 +16,5 @@ cdef const char* osr_get_name(OGRSpatialReferenceH hSrs) cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) cdef void set_proj_search_path(object path) cdef (int, int, int) get_proj_version() +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag) +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField) diff --git a/fiona/_shim3.pyx b/fiona/_shim3.pyx index 9a8899552..20d75b159 100644 --- a/fiona/_shim3.pyx +++ b/fiona/_shim3.pyx @@ -15,7 +15,7 @@ cdef extern from "ogr_srs_api.h" nogil: void OSRSetPROJSearchPaths(const char *const *papszPaths) -from fiona.ogrext2 cimport * +from fiona.ogrext3 cimport * from fiona._err cimport exc_wrap_pointer from fiona._err import cpl_errs, CPLE_BaseError, FionaNullPointerError from fiona.errors import DriverError @@ -167,3 +167,22 @@ cdef (int, int, int) get_proj_version(): cdef int patch OSRGetPROJVersion(&major, &minor, &patch) return (major, minor, patch) + + +cdef void set_field_datetime(void *cogr_feature, int iField, int nYear, int nMonth, int nDay, int nHour, int nMinute, float fSecond, int nTZFlag): + OGR_F_SetFieldDateTimeEx(cogr_feature, iField, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) + + +cdef (int, int, int, int, int, int, float, int) get_field_as_datetime(void *cogr_feature, int iField): + cdef int retval + cdef int nYear = 0 + cdef int nMonth = 0 + cdef int nDay = 0 + cdef int nHour = 0 + cdef int nMinute = 0 + cdef float fSecond = 0.0 + cdef int nTZFlag = 0 + + retval = OGR_F_GetFieldAsDateTimeEx(cogr_feature, iField, &nYear, &nMonth, &nDay, &nHour, &nMinute, &fSecond, &nTZFlag) + + return (retval, nYear, nMonth, nDay, nHour, nMinute, fSecond, nTZFlag) diff --git a/fiona/collection.py b/fiona/collection.py index 61bfc7f5a..c2d791718 100644 --- a/fiona/collection.py 
+++ b/fiona/collection.py @@ -17,7 +17,8 @@ from fiona._env import get_gdal_release_name, get_gdal_version_tuple from fiona.env import env_ctx_if_needed from fiona.errors import FionaDeprecationWarning - from fiona.drvsupport import supported_drivers, driver_mode_mingdal + from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str, + _driver_supports_field) from fiona.path import Path, vsi_path, parse_path from six import string_types, binary_type @@ -412,25 +413,23 @@ def _check_schema_driver_support(self): for field in self._schema["properties"].values(): field_type = field.split(":")[0] - if self._driver == "ESRI Shapefile": - if field_type == "datetime": - raise DriverSupportError("ESRI Shapefile does not support datetime fields") - elif field_type == "time": - raise DriverSupportError("ESRI Shapefile does not support time fields") - elif self._driver == "GPKG": - if field_type == "time": - raise DriverSupportError("GPKG does not support time fields") - elif gdal_version_major == 1: - if field_type == "datetime": - raise DriverSupportError("GDAL 1.x GPKG driver does not support datetime fields") - elif self._driver == "GeoJSON": - if gdal_version_major == 1: - if field_type == "date": - warnings.warn("GeoJSON driver in GDAL 1.x silently converts date to string in non-standard format") - elif field_type == "datetime": - warnings.warn("GeoJSON driver in GDAL 1.x silently converts datetime to string in non-standard format") - elif field_type == "time": - warnings.warn("GeoJSON driver in GDAL 1.x silently converts time to string") + + if not _driver_supports_field(self.driver, field_type): + if self.driver == 'GPKG' and gdal_version_major < 2 and field_type == "datetime": + raise DriverSupportError("GDAL 1.x GPKG driver does not support datetime fields") + else: + raise DriverSupportError("{driver} does not support {field_type} " + "fields".format(driver=self.driver, + field_type=field_type)) + elif field_type in {'time', 'datetime', 'date'} and _driver_converts_field_type_silently_to_str(self.driver, + field_type): + if self._driver == "GeoJSON" and gdal_version_major < 2 and field_type in {'datetime', 'date'}: + warnings.warn("GeoJSON driver in GDAL 1.x silently converts {} to string" + " in non-standard format".format(field_type)) + else: + warnings.warn("{driver} driver silently converts {field_type} " + "to string".format(driver=self.driver, + field_type=field_type)) def flush(self): """Flush the buffer.""" diff --git a/fiona/drvsupport.py b/fiona/drvsupport.py index f3854ac39..74742aeb4 100644 --- a/fiona/drvsupport.py +++ b/fiona/drvsupport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- - from fiona.env import Env +from fiona._env import get_gdal_version_num, calc_gdal_version_num # Here is the list of available drivers as (name, modes) tuples. 
Currently, @@ -63,7 +63,9 @@ # GML GML Yes Yes Yes (read support needs Xerces or libexpat) ("GML", "rw"), # GMT GMT Yes Yes Yes - ("GMT", "raw"), + ("GMT", "rw"), + # GMT renamed to OGR_GMT for GDAL 2.x + ("OGR_GMT", "rw"), # GPSBabel GPSBabel Yes Yes Yes (needs GPSBabel and GPX driver) # GPX GPX Yes Yes Yes (read support needs libexpat) ("GPX", "rw"), @@ -96,9 +98,11 @@ # multi-layer # ("OpenAir", "r"), # PCI Geomatics Database File PCIDSK No No Yes, using internal PCIDSK SDK (from GDAL 1.7.0) - ("PCIDSK", "raw"), + ("PCIDSK", "rw"), # PDS PDS No Yes Yes ("PDS", "r"), + # PDS renamed to OGR_PDS for GDAL 2.x + ("OGR_PDS", "r"), # PGDump PostgreSQL SQL dump Yes Yes Yes # PostgreSQL/PostGIS PostgreSQL/PostGIS Yes Yes No, needs PostgreSQL client library (libpq) # EPIInfo .REC REC No No Yes @@ -141,7 +145,7 @@ ]) -# Mininmal gdal version for different modes +# Minimal gdal version for different modes driver_mode_mingdal = { 'r': {'GPKG': (1, 11, 0), @@ -151,14 +155,27 @@ 'PCIDSK': (2, 0, 0), 'GeoJSONSeq': (2, 4, 0)}, - 'a': {'GMT': (2, 0, 0), - 'GPKG': (1, 11, 0), + 'a': {'GPKG': (1, 11, 0), 'GeoJSON': (2, 1, 0), - 'MapInfo File': (2, 0, 0), - 'PCIDSK': (2, 0, 0)} + 'MapInfo File': (2, 0, 0)} } +def _driver_supports_mode(driver, mode): + """ Returns True if driver supports mode, False otherwise + + Note: this function is not part of Fiona's public API. + """ + if driver not in supported_drivers: + return False + if mode not in supported_drivers[driver]: + return False + if driver in driver_mode_mingdal[mode]: + if get_gdal_version_num() < calc_gdal_version_num(*driver_mode_mingdal[mode][driver]): + return False + return True + + # Removes drivers in the supported_drivers dictionary that the # machine's installation of OGR due to how it is compiled. # OGR may not have optional libraries compiled or installed. @@ -176,3 +193,171 @@ def _filter_supported_drivers(): _filter_supported_drivers() + +# driver_converts_to_str contains field type, driver combinations that are silently converted to string +# None: field type is always converted to str +# (2, 0, 0): starting from gdal 2.0 field type is not converted to string +_driver_converts_to_str = { + 'time': { + 'CSV': None, + 'PCIDSK': None, + 'GeoJSON': (2, 0, 0), + 'GPKG': None, + 'GMT': None, + 'OGR_GMT': None + }, + 'datetime': { + 'CSV': None, + 'PCIDSK': None, + 'GeoJSON': (2, 0, 0), + 'GML': (3, 1, 0), + }, + 'date': { + 'CSV': None, + 'PCIDSK': None, + 'GeoJSON': (2, 0, 0), + 'GMT': None, + 'OGR_GMT': None, + 'GML': (3, 1, 0), + } +} + + +def _driver_converts_field_type_silently_to_str(driver, field_type): + """ Returns True if the driver converts the field_type silently to str, False otherwise + + Note: this function is not part of Fiona's public API. 
+ """ + if field_type in _driver_converts_to_str and driver in _driver_converts_to_str[field_type]: + if _driver_converts_to_str[field_type][driver] is None: + return True + elif get_gdal_version_num() < calc_gdal_version_num(*_driver_converts_to_str[field_type][driver]): + return True + return False + + +# None: field type is never supported, (2, 0, 0) field type is supported starting with gdal 2.0 +_driver_field_type_unsupported = { + 'time': { + 'ESRI Shapefile': None, + 'GPKG': (2, 0, 0), + 'GPX': None, + 'GPSTrackMaker': None, + 'GML': (3, 1, 0), + 'DGN': None, + 'BNA': None, + 'DXF': None, + 'PCIDSK': (2, 1, 0) + }, + 'datetime': { + 'ESRI Shapefile': None, + 'GPKG': (2, 0, 0), + 'DGN': None, + 'BNA': None, + 'DXF': None, + 'PCIDSK': (2, 1, 0) + }, + 'date': { + 'GPX': None, + 'GPSTrackMaker': None, + 'DGN': None, + 'BNA': None, + 'DXF': None, + 'PCIDSK': (2, 1, 0) + } +} + + +def _driver_supports_field(driver, field_type): + """ Returns True if the driver supports the field_type, False otherwise + + Note: this function is not part of Fiona's public API. + """ + if field_type in _driver_field_type_unsupported and driver in _driver_field_type_unsupported[field_type]: + if _driver_field_type_unsupported[field_type][driver] is None: + return False + elif get_gdal_version_num() < calc_gdal_version_num(*_driver_field_type_unsupported[field_type][driver]): + return False + + return True + + +# None: field type never supports timezones, (2, 0, 0): field type supports timezones with GDAL 2.0.0 +_drivers_not_supporting_timezones = { + 'datetime': { + 'MapInfo File': None, + 'GPKG': (3, 1, 0), + 'GPSTrackMaker': (3, 1, 1) + }, + 'time': { + 'MapInfo File': None, + 'GPKG': None, + 'GPSTrackMaker': None, + 'GeoJSON': None, + 'GeoJSONSeq': None, + 'GML': None, + 'CSV': None, + 'GMT': None, + 'OGR_GMT': None + } +} + + +def _driver_supports_timezones(driver, field_type): + """ Returns True if the driver supports timezones for field_type, False otherwise + + Note: this function is not part of Fiona's public API. + """ + if field_type in _drivers_not_supporting_timezones and driver in _drivers_not_supporting_timezones[field_type]: + if _drivers_not_supporting_timezones[field_type][driver] is None: + return False + elif get_gdal_version_num() < calc_gdal_version_num(*_drivers_not_supporting_timezones[field_type][driver]): + return False + return True + + +# None: driver never supports timezones, (2, 0, 0): driver supports timezones with GDAL 2.0.0 +_drivers_not_supporting_milliseconds = { + 'GPSTrackMaker': None +} + + +def _driver_supports_milliseconds(driver): + """ Returns True if the driver supports milliseconds, False otherwise + + Note: this function is not part of Fiona's public API. 
+ """ + # GDAL 2.0 introduced support for milliseconds + if get_gdal_version_num() < calc_gdal_version_num(2, 0, 0): + return False + + if driver in _drivers_not_supporting_milliseconds: + if _drivers_not_supporting_milliseconds[driver] is None: + return False + elif calc_gdal_version_num(*_drivers_not_supporting_milliseconds[driver]) < get_gdal_version_num(): + return False + + return True + + +# None: field type never supports unknown timezones, (2, 0, 0): field type supports unknown timezones with GDAL 2.0.0 +_drivers_not_supporting_unknown_timezone = { + 'datetime': + {'GPKG': None, + 'GPX': (2, 4, 0) + } +} + + +def _driver_supports_unknown_timezones(driver, field_type): + """ Returns True if the driver supports timezones for field_type, False otherwise + + Note: this function is not part of Fiona's public API. + """ + if (field_type in _drivers_not_supporting_unknown_timezone and + driver in _drivers_not_supporting_unknown_timezone[field_type]): + if _drivers_not_supporting_unknown_timezone[field_type][driver] is None: + return False + elif get_gdal_version_num() < calc_gdal_version_num(*_drivers_not_supporting_unknown_timezone[field_type][driver]): + return False + return True diff --git a/fiona/ogrext.pyx b/fiona/ogrext.pyx index 043c81631..59710b23a 100644 --- a/fiona/ogrext.pyx +++ b/fiona/ogrext.pyx @@ -22,7 +22,7 @@ from fiona._geometry cimport ( from fiona._err cimport exc_wrap_int, exc_wrap_pointer, exc_wrap_vsilfile import fiona -from fiona._env import GDALVersion, get_gdal_version_num +from fiona._env import GDALVersion, get_gdal_version_num, calc_gdal_version_num from fiona._err import cpl_errs, FionaNullPointerError, CPLE_BaseError, CPLE_OpenFailedError from fiona._geometry import GEOMETRY_TYPES from fiona import compat @@ -41,7 +41,8 @@ from fiona._shim cimport is_field_null, osr_get_name, osr_set_traditional_axis_m from libc.stdlib cimport malloc, free from libc.string cimport strcmp from cpython cimport PyBytes_FromStringAndSize, PyBytes_AsString - +from fiona.drvsupport import _driver_supports_timezones + cdef extern from "ogr_api.h" nogil: @@ -117,6 +118,15 @@ def _bounds(geometry): cdef int GDAL_VERSION_NUM = get_gdal_version_num() +class TZ(datetime.tzinfo): + + def __init__(self, minutes): + self.minutes = minutes + + def utcoffset(self, dt): + return datetime.timedelta(minutes=self.minutes) + + # Feature extension classes and functions follow. 
cdef class FeatureBuilder: @@ -151,13 +161,6 @@ cdef class FeatureBuilder: """ cdef void *fdefn = NULL cdef int i - cdef int y = 0 - cdef int m = 0 - cdef int d = 0 - cdef int hh = 0 - cdef int mm = 0 - cdef int ss = 0 - cdef int tz = 0 cdef unsigned char *data = NULL cdef int l cdef int retval @@ -239,16 +242,26 @@ cdef class FeatureBuilder: props[key] = val elif fieldtype in (FionaDateType, FionaTimeType, FionaDateTimeType): - retval = OGR_F_GetFieldAsDateTime( - feature, i, &y, &m, &d, &hh, &mm, &ss, &tz) + retval, y, m, d, hh, mm, ss, tz = get_field_as_datetime(feature, i) + + ms, ss = math.modf(ss) + ss = int(ss) + ms = int(round(ms * 10**6)) + + # OGR_F_GetFieldAsDateTimeEx: (0=unknown, 1=localtime, 100=GMT, see data model for details) + # CPLParseRFC822DateTime: (0=unknown, 100=GMT, 101=GMT+15minute, 99=GMT-15minute), or NULL + tzinfo = None + if tz > 1: + tz_minutes = (tz - 100) * 15 + tzinfo = TZ(tz_minutes) + try: if fieldtype is FionaDateType: props[key] = datetime.date(y, m, d).isoformat() elif fieldtype is FionaTimeType: - props[key] = datetime.time(hh, mm, ss).isoformat() + props[key] = datetime.time(hh, mm, ss, ms, tzinfo).isoformat() else: - props[key] = datetime.datetime( - y, m, d, hh, mm, ss).isoformat() + props[key] = datetime.datetime(y, m, d, hh, mm, ss, ms, tzinfo).isoformat() except ValueError as err: log.exception(err) props[key] = None @@ -367,32 +380,62 @@ cdef class OGRFeatureBuilder: elif isinstance(value, float): OGR_F_SetFieldDouble(cogr_feature, i, value) - elif (isinstance(value, string_types) - and schema_type in ['date', 'time', 'datetime']): - if schema_type == 'date': - y, m, d, hh, mm, ss, ff = parse_date(value) - elif schema_type == 'time': - y, m, d, hh, mm, ss, ff = parse_time(value) + elif schema_type in ['date', 'time', 'datetime'] and value is not None: + if isinstance(value, string_types): + if schema_type == 'date': + y, m, d, hh, mm, ss, ms, tz = parse_date(value) + elif schema_type == 'time': + y, m, d, hh, mm, ss, ms, tz = parse_time(value) + else: + y, m, d, hh, mm, ss, ms, tz = parse_datetime(value) + elif (isinstance(value, datetime.date) and schema_type == 'date'): + y, m, d = value.year, value.month, value.day + hh = mm = ss = ms = 0 + tz = None + elif (isinstance(value, datetime.datetime) and schema_type == 'datetime'): + y, m, d = value.year, value.month, value.day + hh, mm, ss, ms = value.hour, value.minute, value.second, value.microsecond + if value.utcoffset() is None: + tz = None + else: + tz = value.utcoffset().total_seconds() / 60 + elif (isinstance(value, datetime.time) and schema_type == 'time'): + y = m = d = 0 + hh, mm, ss, ms = value.hour, value.minute, value.second, value.microsecond + if value.utcoffset() is None: + tz = None + else: + tz = value.utcoffset().total_seconds() / 60 + + # Convert to UTC if driver does not support timezones + if tz is not None and not _driver_supports_timezones(collection.driver, schema_type): + + if schema_type == 'datetime': + d_tz = datetime.datetime(y, m, d, hh, mm, ss, int(ms), TZ(tz)) + d_utc = d_tz - d_tz.utcoffset() + y, m, d = d_utc.year, d_utc.month, d_utc.day + hh, mm, ss, ms = d_utc.hour, d_utc.minute, d_utc.second, d_utc.microsecond + tz = 0 + del d_utc, d_tz + elif schema_type == 'time': + d_tz = datetime.datetime(1900, 1, 1, hh, mm, ss, int(ms), TZ(tz)) + d_utc = d_tz - d_tz.utcoffset() + y = m = d = 0 + hh, mm, ss, ms = d_utc.hour, d_utc.minute, d_utc.second, d_utc.microsecond + tz = 0 + del d_utc, d_tz + + # tzinfo: (0=unknown, 100=GMT, 101=GMT+15minute, 99=GMT-15minute), or 
NULL + if tz is not None: + tzinfo = int(tz / 15.0 + 100) else: - y, m, d, hh, mm, ss, ff = parse_datetime(value) - OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.date) - and schema_type == 'date'): - y, m, d = value.year, value.month, value.day - OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, 0, 0, 0, 0) - elif (isinstance(value, datetime.datetime) - and schema_type == 'datetime'): - y, m, d = value.year, value.month, value.day - hh, mm, ss = value.hour, value.minute, value.second - OGR_F_SetFieldDateTime( - cogr_feature, i, y, m, d, hh, mm, ss, 0) - elif (isinstance(value, datetime.time) - and schema_type == 'time'): - hh, mm, ss = value.hour, value.minute, value.second - OGR_F_SetFieldDateTime( - cogr_feature, i, 0, 0, 0, hh, mm, ss, 0) + tzinfo = 0 + + # Add microseconds to seconds + ss += ms / 10**6 + + set_field_datetime(cogr_feature, i, y, m, d, hh, mm, ss, tzinfo) + elif isinstance(value, bytes) and schema_type == "bytes": string_c = value OGR_F_SetFieldBinary(cogr_feature, i, len(value), diff --git a/fiona/ogrext2.pxd b/fiona/ogrext2.pxd index 24acac8b9..f7754c529 100644 --- a/fiona/ogrext2.pxd +++ b/fiona/ogrext2.pxd @@ -246,7 +246,7 @@ cdef extern from "ogr_api.h": void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) - int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) @@ -256,7 +256,7 @@ cdef extern from "ogr_api.h": int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void * OGR_F_StealGeometry (void *feature) - void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void OGR_F_SetFieldString (void *feature, int n, char *value) diff --git a/fiona/ogrext3.pxd b/fiona/ogrext3.pxd index 31d32799d..041b9eb29 100644 --- a/fiona/ogrext3.pxd +++ b/fiona/ogrext3.pxd @@ -247,7 +247,7 @@ cdef extern from "ogr_api.h": void OGR_F_Destroy (void *feature) long OGR_F_GetFID (void *feature) int OGR_F_IsFieldSet (void *feature, int n) - int OGR_F_GetFieldAsDateTime (void *feature, int n, int *y, int *m, int *d, int *h, int *m, int *s, int *z) + int OGR_F_GetFieldAsDateTimeEx (void *feature, int n, int *y, int *m, int *d, int *h, int *m, float *s, int *z) double OGR_F_GetFieldAsDouble (void *feature, int n) int OGR_F_GetFieldAsInteger (void *feature, int n) char * OGR_F_GetFieldAsString (void *feature, int n) @@ -257,7 +257,7 @@ cdef extern from "ogr_api.h": int OGR_F_GetFieldIndex (void *feature, char *name) void * OGR_F_GetGeometryRef (void *feature) void * OGR_F_StealGeometry (void *feature) - void OGR_F_SetFieldDateTime (void *feature, int n, int y, int m, int d, int hh, int mm, int ss, int tz) + void OGR_F_SetFieldDateTimeEx (void *feature, int n, int y, int m, int d, int hh, int mm, float ss, int tz) void OGR_F_SetFieldDouble (void *feature, int n, double value) void OGR_F_SetFieldInteger (void *feature, int n, int value) void 
OGR_F_SetFieldString (void *feature, int n, char *value) diff --git a/fiona/rfc3339.py b/fiona/rfc3339.py index 3f6a43000..bc7e39629 100644 --- a/fiona/rfc3339.py +++ b/fiona/rfc3339.py @@ -4,77 +4,135 @@ # writing will convert times with a timezone offset to GMT (Z) and otherwise # will write times with the unknown zone. -import datetime import logging import re log = logging.getLogger("Fiona") + # Fiona's 'date', 'time', and 'datetime' types are sub types of 'str'. + class FionaDateType(str): """Dates without time.""" + class FionaTimeType(str): """Times without dates.""" + class FionaDateTimeType(str): """Dates and times.""" + pattern_date = re.compile(r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)") pattern_time = re.compile( - r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?" ) + r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") pattern_datetime = re.compile( - r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?" ) + r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") + class group_accessor(object): def __init__(self, m): self.match = m + def group(self, i): try: return self.match.group(i) or 0 except IndexError: return 0 + def parse_time(text): - """Given a RFC 3339 time, returns a tz-naive datetime tuple""" + """ Given a time, returns a datetime tuple + + Parameters + ---------- + text: string to be parsed + + Returns + ------- + (int, int , int, int, int, int, int, int): + datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) + + """ match = re.search(pattern_time, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) + + if g.group(8) == '-': + tz = -1.0 * (int(g.group(9)) * 60 + int(g.group(11))) + elif g.group(8) == '+': + tz = int(g.group(9)) * 60 + int(g.group(11)) + else: + tz = None + return (0, 0, 0, - int(g.group(1)), - int(g.group(3)), - int(g.group(5)), - 1000000.0*float(g.group(6)) ) + int(g.group(1)), + int(g.group(3)), + int(g.group(5)), + int(1000000.0 * float(g.group(6))), + tz + ) + def parse_date(text): - """Given a RFC 3339 date, returns a tz-naive datetime tuple""" + """Given a date, returns a datetime tuple + + Parameters + ---------- + text: string to be parsed + + Returns + ------- + (int, int , int, int, int, int, int, int): + datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) + """ match = re.search(pattern_date, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) return ( - int(g.group(1)), - int(g.group(3)), + int(g.group(1)), + int(g.group(3)), int(g.group(5)), - 0, 0, 0, 0.0 ) + 0, 0, 0, 0, None) + def parse_datetime(text): - """Given a RFC 3339 datetime, returns a tz-naive datetime tuple""" + """Given a datetime, returns a datetime tuple + + Parameters + ---------- + text: string to be parsed + + Returns + ------- + (int, int , int, int, int, int, int, int): + datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) + """ match = re.search(pattern_datetime, text) if match is None: raise ValueError("Time data '%s' does not match pattern" % text) g = group_accessor(match) log.debug("Match groups: %s", match.groups()) + + if g.group(14) == '-': + tz = -1.0 * (int(g.group(15)) * 60 + 
int(g.group(17))) + elif g.group(14) == '+': + tz = int(g.group(15)) * 60 + int(g.group(17)) + else: + tz = None + return ( - int(g.group(1)), - int(g.group(3)), + int(g.group(1)), + int(g.group(3)), int(g.group(5)), - int(g.group(7)), - int(g.group(9)), - int(g.group(11)), - 1000000.0*float(g.group(12)) ) - + int(g.group(7)), + int(g.group(9)), + int(g.group(11)), + int(1000000.0 * float(g.group(12))), + tz) diff --git a/requirements-dev.txt b/requirements-dev.txt index 0cdc06a6b..a5976e5c5 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,3 +7,4 @@ pytest-cov==2.8.1 setuptools==41.6.0 boto3==1.9.19 wheel==0.33.6 +pytz==2020.1 diff --git a/tests/conftest.py b/tests/conftest.py index 5b4513097..6e0a08c90 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,11 +6,12 @@ import shutil import tarfile import zipfile - +from collections import OrderedDict from click.testing import CliRunner import pytest import fiona +from fiona.crs import from_epsg from fiona.env import GDALVersion driver_extensions = {'DXF': 'dxf', @@ -25,6 +26,7 @@ 'GeoJSON': 'json', 'GeoJSONSeq': 'geojsons', 'GMT': 'gmt', + 'OGR_GMT': 'gmt', 'BNA': 'bna'} @@ -316,3 +318,127 @@ def unittest_path_coutwildrnp_shp(path_coutwildrnp_shp, request): """Makes shapefile path available to unittest tests""" request.cls.path_coutwildrnp_shp = path_coutwildrnp_shp + +@pytest.fixture() +def testdata_generator(): + """ Helper function to create test data sets for ideally all supported drivers + """ + + def get_schema(driver): + special_schemas = {'CSV': {'geometry': None, 'properties': OrderedDict([('position', 'int')])}, + 'BNA': {'geometry': 'Point', 'properties': {}}, + 'DXF': {'properties': OrderedDict( + [('Layer', 'str'), + ('SubClasses', 'str'), + ('Linetype', 'str'), + ('EntityHandle', 'str'), + ('Text', 'str')]), + 'geometry': 'Point'}, + 'GPX': {'geometry': 'Point', + 'properties': OrderedDict([('ele', 'float'), ('time', 'datetime')])}, + 'GPSTrackMaker': {'properties': OrderedDict([]), 'geometry': 'Point'}, + 'DGN': {'properties': OrderedDict([]), 'geometry': 'LineString'}, + 'MapInfo File': {'geometry': 'Point', 'properties': OrderedDict([('position', 'str')])} + } + + return special_schemas.get(driver, {'geometry': 'Point', 'properties': OrderedDict([('position', 'int')])}) + + def get_crs(driver): + special_crs = {'MapInfo File': from_epsg(4326)} + return special_crs.get(driver, None) + + def get_records(driver, range): + special_records1 = {'CSV': [{'geometry': None, 'properties': {'position': i}} for i in range], + 'BNA': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {}} + for i + in range], + 'DXF': [ + {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': OrderedDict( + [('Layer', '0'), + ('SubClasses', 'AcDbEntity:AcDbPoint'), + ('Linetype', None), + ('EntityHandle', str(i + 20000)), + ('Text', None)])} for i in range], + 'GPX': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': {'ele': 0.0, 'time': '2020-03-24T16:08:40+00:00'}} for i + in range], + 'GPSTrackMaker': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': {}} for i in range], + 'DGN': [ + {'geometry': {'type': 'LineString', 'coordinates': [(float(i), 0.0), (0.0, 0.0)]}, + 'properties': {}} for i in range], + 'MapInfo File': [ + {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, + 'properties': {'position': str(i)}} for i in range], + 'PCIDSK': [{'geometry': {'type': 'Point', 'coordinates': (0.0, float(i), 
0.0)},
+                                   'properties': {'position': i}} for i in range]
+                            }
+        return special_records1.get(driver, [
+            {'geometry': {'type': 'Point', 'coordinates': (0.0, float(i))}, 'properties': {'position': i}} for i in
+            range])
+
+    def get_records2(driver, range):
+        special_records2 = {'DGN': [
+            {'geometry': {'type': 'LineString', 'coordinates': [(float(i), 0.0), (0.0, 0.0)]},
+             'properties': OrderedDict(
+                 [('Type', 4),
+                  ('Level', 0),
+                  ('GraphicGroup', 0),
+                  ('ColorIndex', 0),
+                  ('Weight', 0),
+                  ('Style', 0),
+                  ('EntityNum', None),
+                  ('MSLink', None),
+                  ('Text', None)])} for i in range],
+        }
+        return special_records2.get(driver, get_records(driver, range))
+
+    def test_equal(driver, val_in, val_out):
+        is_good = True
+        is_good = is_good and val_in['geometry'] == val_out['geometry']
+        for key in val_in['properties']:
+            if key in val_out['properties']:
+                is_good = is_good and str(val_in['properties'][key]) == str(val_out['properties'][key])
+            else:
+                is_good = False
+        return is_good
+
+    def _testdata_generator(driver, range1, range2):
+        """ Generate test data and helper methods for a specific driver. Each generated set of records
+        contains the positions specified by the respective range. These positions are encoded either as a field
+        or in the geometry of the record, depending on the driver's characteristics.
+
+        Parameters
+        ----------
+        driver: str
+            Name of the driver to generate test data for
+        range1: list of integer
+            Range of positions for the first set of records
+        range2: list of integer
+            Range of positions for the second set of records
+
+        Returns
+        -------
+        schema
+            A schema for the records
+        crs
+            A crs for the records
+        records1
+            A set of records containing the positions of range1
+        records2
+            A set of records containing the positions of range2
+        test_equal
+            A function that returns True if the geometry is equal between the generated records and a record and if
+            the properties of the generated records can be found in a record
+        """
+        return get_schema(driver), get_crs(driver), get_records(driver, range1), get_records2(driver, range2),\
+            test_equal
+
+    return _testdata_generator
+
+
+@pytest.fixture(scope='session')
+def path_test_tz_geojson(data_dir):
+    """Path to ``test_tz.geojson``"""
+    return os.path.join(data_dir, 'test_tz.geojson')
diff --git a/tests/data/test_tz.geojson b/tests/data/test_tz.geojson
new file mode 100644
index 000000000..cb4337692
--- /dev/null
+++ b/tests/data/test_tz.geojson
@@ -0,0 +1,18 @@
+{
+  "type": "FeatureCollection",
+  "features": [
+    {
+      "type": "Feature",
+      "properties": {
+        "test": "2015-04-22T00:00:00+07:00"
+      },
+      "geometry": {
+        "type": "Point",
+        "coordinates": [
+          -79.4,
+          43.6
+        ]
+      }
+    }
+  ]
+}
diff --git a/tests/test_collection.py b/tests/test_collection.py
index da515a70f..4319ce145 100644
--- a/tests/test_collection.py
+++ b/tests/test_collection.py
@@ -877,7 +877,7 @@ def test_collection_zip_http():
     )
     assert (
         ds.path
-        == "/vsizip/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip",
+        == "/vsizip/vsicurl/https://raw.githubusercontent.com/Toblerity/Fiona/master/tests/data/coutwildrnp.zip"
     )
     assert len(ds) == 67
 
diff --git a/tests/test_datetime.py b/tests/test_datetime.py
index 9fda58090..98e734e90 100644
--- a/tests/test_datetime.py
+++ b/tests/test_datetime.py
@@ -2,267 +2,690 @@ See also test_rfc3339.py for datetime parser tests.
""" +from collections import OrderedDict import fiona +from fiona._env import get_gdal_version_num, calc_gdal_version_num import pytest -import tempfile, shutil -import os from fiona.errors import DriverSupportError -from .conftest import requires_gpkg - -GDAL_MAJOR_VER = fiona.get_gdal_version_num() // 1000000 - -GEOMETRY_TYPE = "Point" -GEOMETRY_EXAMPLE = {"type": "Point", "coordinates": [1, 2]} - -DRIVER_FILENAME = { - "ESRI Shapefile": "test.shp", - "GPKG": "test.gpkg", - "GeoJSON": "test.geojson", - "MapInfo File": "test.tab", -} - -DATE_EXAMPLE = "2018-03-25" -DATETIME_EXAMPLE = "2018-03-25T22:49:05" -TIME_EXAMPLE = "22:49:05" - -class TestDateFieldSupport: - def write_data(self, driver): - filename = DRIVER_FILENAME[driver] - temp_dir = tempfile.mkdtemp() - path = os.path.join(temp_dir, filename) - schema = { - "geometry": GEOMETRY_TYPE, - "properties": { - "date": "date", - } - } - records = [ - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "date": DATE_EXAMPLE, - } - }, - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "date": None, - } - }, - ] - with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: - collection.writerecords(records) - - with fiona.Env(), fiona.open(path, "r") as collection: - schema = collection.schema - features = list(collection) - - shutil.rmtree(temp_dir) - - return schema, features - - def test_shapefile(self): - driver = "ESRI Shapefile" - schema, features = self.write_data(driver) - - assert schema["properties"]["date"] == "date" - assert features[0]["properties"]["date"] == DATE_EXAMPLE - assert features[1]["properties"]["date"] is None - - @requires_gpkg - def test_gpkg(self): - driver = "GPKG" - schema, features = self.write_data(driver) - - assert schema["properties"]["date"] == "date" - assert features[0]["properties"]["date"] == DATE_EXAMPLE - assert features[1]["properties"]["date"] is None - - def test_geojson(self): - # GDAL 1: date field silently converted to string - # GDAL 1: date string format uses / instead of - - driver = "GeoJSON" - schema, features = self.write_data(driver) - - if GDAL_MAJOR_VER >= 2: - assert schema["properties"]["date"] == "date" - assert features[0]["properties"]["date"] == DATE_EXAMPLE +from fiona.rfc3339 import parse_time, parse_datetime +from .conftest import get_temp_filename +from fiona.env import GDALVersion +import datetime +from fiona.drvsupport import (supported_drivers, driver_mode_mingdal, _driver_converts_field_type_silently_to_str, + _driver_supports_field, _driver_converts_to_str, _driver_supports_timezones, + _driver_supports_milliseconds, _driver_supports_unknown_timezones) +import pytz +from pytz import timezone + +gdal_version = GDALVersion.runtime() + + +def get_schema(driver, field_type): + if driver == 'GPX': + return {'properties': OrderedDict([('ele', 'float'), + ('time', field_type)]), + 'geometry': 'Point'} + if driver == 'GPSTrackMaker': + return { + 'properties': OrderedDict([('name', 'str'), ('comment', 'str'), ('icon', 'int'), ('time', field_type)]), + 'geometry': 'Point'} + + return {"geometry": "Point", + "properties": {"datefield": field_type}} + + +def get_records(driver, values): + if driver == 'GPX': + return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": {'ele': 0, "time": val}} for val in values] + if driver == 'GPSTrackMaker': + return [{"geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": OrderedDict([('name', ''), ('comment', ''), ('icon', 48), ('time', val)])} for + val in values] + + return 
[{"geometry": {"type": "Point", "coordinates": [1, 2]}, + "properties": {"datefield": val}} for val in values] + + +def get_schema_field(driver, schema): + if driver in {'GPX', 'GPSTrackMaker'}: + return schema["properties"]["time"] + return schema["properties"]["datefield"] + + +def get_field(driver, f): + if driver in {'GPX', 'GPSTrackMaker'}: + return f["properties"]["time"] + return f['properties']['datefield'] + + +class TZ(datetime.tzinfo): + + def __init__(self, minutes): + self.minutes = minutes + + def utcoffset(self, dt): + return datetime.timedelta(minutes=self.minutes) + + +def generate_testdata(field_type, driver): + """ Generate test cases for test_datefield + + Each test case has the format [(in_value1, true_value as datetime.*object), + (in_value2, true_value as datetime.*object), ...] + """ + + # Test data for 'date' data type + if field_type == 'date': + return [("2018-03-25", datetime.date(2018, 3, 25)), + (datetime.date(2018, 3, 25), datetime.date(2018, 3, 25))] + + # Test data for 'datetime' data type + if field_type == 'datetime': + return [("2018-03-25T22:49:05", datetime.datetime(2018, 3, 25, 22, 49, 5)), + (datetime.datetime(2018, 3, 25, 22, 49, 5), datetime.datetime(2018, 3, 25, 22, 49, 5)), + ("2018-03-25T22:49:05.23", datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), + (datetime.datetime(2018, 3, 25, 22, 49, 5, 230000), datetime.datetime(2018, 3, 25, 22, 49, 5, 230000)), + ("2018-03-25T22:49:05.123456", datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), + (datetime.datetime(2018, 3, 25, 22, 49, 5, 123456), datetime.datetime(2018, 3, 25, 22, 49, 5, 123000)), + ("2018-03-25T22:49:05+01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), + ("2018-03-25T22:49:05-01:30", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(90))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-90))), + (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich')), + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))), + (datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain')), + datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15))), + (datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15)), + datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), + ("2018-03-25T22:49:05-23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(-60 * 24 + 15))), + ("2018-03-25T22:49:05+23:45", datetime.datetime(2018, 3, 25, 22, 49, 5, tzinfo=TZ(60 * 24 - 15)))] + + # Test data for 'time' data type + elif field_type == 'time': + return [("22:49:05", datetime.time(22, 49, 5)), + (datetime.time(22, 49, 5), datetime.time(22, 49, 5)), + ("22:49:05.23", datetime.time(22, 49, 5, 230000)), + (datetime.time(22, 49, 5, 230000), datetime.time(22, 49, 5, 230000)), + ("22:49:05.123456", datetime.time(22, 49, 5, 123000)), + (datetime.time(22, 49, 5, 123456), datetime.time(22, 49, 5, 123000)), + ("22:49:05+01:30", datetime.time(22, 49, 5, tzinfo=TZ(90))), + ("22:49:05-01:30", datetime.time(22, 49, 5, tzinfo=TZ(-90))), + (datetime.time(22, 49, 5, tzinfo=TZ(90)), datetime.time(22, 49, 5, 
tzinfo=TZ(90))),
+                (datetime.time(22, 49, 5, tzinfo=TZ(-90)), datetime.time(22, 49, 5, tzinfo=TZ(-90))),
+                (datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)),
+                 datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15))),
+                (datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15)),
+                 datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))),
+                ("22:49:05-23:45", datetime.time(22, 49, 5, tzinfo=TZ(-60 * 24 + 15))),
+                ("22:49:05+23:45", datetime.time(22, 49, 5, tzinfo=TZ(60 * 24 - 15)))]
+
+
+def compare_datetimes_utc(d1, d2):
+    """ Test if two datetime objects are the same. Naive datetimes are assumed to be UTC"""
+
+    if d1.tzinfo is None:
+        d1 = d1.replace(tzinfo=TZ(0))
+
+    if d2.tzinfo is None:
+        d2 = d2.replace(tzinfo=TZ(0))
+
+    return d1 == d2
+
+
+def test_compare_datetimes_utc():
+    """ Test compare_datetimes_utc """
+    d1 = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60))
+    d2 = datetime.datetime(2020, 1, 21, 11, 30, 0, tzinfo=TZ(0))
+    assert d1 == d2
+    assert compare_datetimes_utc(d1, d2)
+
+    d1 = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-60))
+    d2 = datetime.datetime(2020, 1, 21, 11, 30, 0, tzinfo=TZ(0))
+    assert not d1 == d2
+    assert not compare_datetimes_utc(d1, d2)
+
+    d1 = datetime.datetime(2020, 1, 21, 13, 0, 0, tzinfo=TZ(60))
+    d2 = datetime.datetime(2020, 1, 21, 5, 0, 0, tzinfo=TZ(-60 * 7))
+    assert d1 == d2
+    assert compare_datetimes_utc(d1, d2)
+
+    d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))
+    d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc)
+    assert d1 == d2
+    assert compare_datetimes_utc(d1, d2)
+
+    d1 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))
+    d2 = datetime.datetime(2020, 1, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))
+    assert d1 == d2
+    assert compare_datetimes_utc(d1, d2)
+
+    d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('Europe/Zurich'))
+    d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('US/Mountain'))
+    assert d1 == d2
+    assert compare_datetimes_utc(d1, d2)
+
+
+def convert_time_to_utc(d):
+    """ Convert a datetime.time object to UTC"""
+    d = datetime.datetime(1900, 1, 1, d.hour, d.minute, d.second, d.microsecond, d.tzinfo)
+    d -= d.utcoffset()
+    return d.time()
+
+
+def compare_times_utc(d1, d2):
+    """ Test if two datetime.time objects with fixed timezones have the same UTC time"""
+    if d1.tzinfo is not None:
+        d1 = convert_time_to_utc(d1)
+
+    if d2.tzinfo is not None:
+        d2 = convert_time_to_utc(d2)
+
+    return d1.replace(tzinfo=None) == d2.replace(tzinfo=None)
+
+
+def test_compare_times_utc():
+    """
+    Test compare_times_utc
+    """
+    d1 = datetime.time(12, 30, 0, tzinfo=TZ(60))
+    d2 = datetime.time(11, 30, 0, tzinfo=TZ(0))
+    assert compare_times_utc(d1, d2)
+
+    d1 = datetime.time(12, 30, 0, tzinfo=TZ(-60))
+    d2 = datetime.time(11, 30, 0, tzinfo=TZ(0))
+    assert not compare_times_utc(d1, d2)
+
+    d1 = datetime.time(13, 0, 0, tzinfo=TZ(60))
+    d2 = datetime.time(5, 0, 0, tzinfo=TZ(-60 * 7))
+    assert compare_times_utc(d1, d2)
+
+    d1 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('MET')).timetz()
+    d2 = datetime.datetime(2020, 6, 21, 12, 0, 0, tzinfo=pytz.utc).astimezone(timezone('EST')).timetz()
+    assert compare_times_utc(d1, d2)
+
+
+def get_tz_offset(d):
+    """ Returns a timezone (sign, hours, minutes) tuple
+
+    E.g.: for '2020-01-21T12:30:00+01:30' ('+', 1, 30) is returned
+
+    """
+    offset_minutes = d.utcoffset().total_seconds() / 60
+    if offset_minutes < 0:
+        sign = "-"
+    else:
+        sign = "+"
+    hours = int(abs(offset_minutes) / 60)
+    minutes = int(abs(offset_minutes) % 60)
+    return sign, hours, minutes
+
+
+def test_get_tz_offset():
+    """ Test get_tz_offset"""
+    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(90))
+    assert get_tz_offset(d) == ('+', 1, 30)
+
+    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-90))
+    assert get_tz_offset(d) == ('-', 1, 30)
+
+    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(60 * 24 - 15))
+    assert get_tz_offset(d) == ('+', 23, 45)
+
+    d = datetime.datetime(2020, 1, 21, 12, 30, 0, tzinfo=TZ(-60 * 24 + 15))
+    assert get_tz_offset(d) == ('-', 23, 45)
+
+
+def generate_testcases():
+    """ Generate test cases for drivers that support datefields, convert datefields to string, or do not support
+    datefields"""
+    _test_cases_datefield = []
+    _test_cases_datefield_to_str = []
+    _test_cases_datefield_not_supported = []
+
+    for field_type in ['time', 'datetime', 'date']:
+        # Select only drivers that are capable of writing fields
+        for driver, raw in supported_drivers.items():
+            if ('w' in raw and
+                    (driver not in driver_mode_mingdal['w'] or
+                     gdal_version >= GDALVersion(*driver_mode_mingdal['w'][driver][:2]))):
+                if _driver_supports_field(driver, field_type):
+                    if _driver_converts_field_type_silently_to_str(driver, field_type):
+                        _test_cases_datefield_to_str.append((driver, field_type))
+                    else:
+                        _test_cases_datefield.append((driver, field_type))
+                else:
+                    _test_cases_datefield_not_supported.append((driver, field_type))
+
+    return _test_cases_datefield, _test_cases_datefield_to_str, _test_cases_datefield_not_supported
+
+
+test_cases_datefield, test_cases_datefield_to_str, test_cases_datefield_not_supported = generate_testcases()
+
+
+@pytest.mark.parametrize("driver, field_type", test_cases_datefield)
+def test_datefield(tmpdir, driver, field_type):
+    """
+    Test date, time, datetime field types.
+    """
+
+    def _validate(val, val_exp, field_type, driver):
+
+        if field_type == 'date':
+            return val == val_exp.isoformat()
+
+        elif field_type == 'datetime':
+
+            # some drivers do not support timezones. In this case, Fiona converts datetime fields with a timezone
+            # other than UTC to UTC. Thus, both the datetime read by Fiona and the expected value are first
+            # converted to UTC before being compared.
+
+            # Milliseconds
+            if _driver_supports_milliseconds(driver):
+                y, m, d, hh, mm, ss, ms, tz = parse_datetime(val)
+                if tz is not None:
+                    tz = TZ(tz)
+                val_d = datetime.datetime(y, m, d, hh, mm, ss, ms, tz)
+                return compare_datetimes_utc(val_d, val_exp)
+            else:
+                # No Milliseconds
+                y, m, d, hh, mm, ss, ms, tz = parse_datetime(val)
+                if tz is not None:
+                    tz = TZ(tz)
+                val_d = datetime.datetime(y, m, d, hh, mm, ss, ms, tz)
+                return compare_datetimes_utc(val_d, val_exp.replace(microsecond=0))
+
+        elif field_type == 'time':
+
+            # some drivers do not support timezones. In this case, Fiona converts time fields with a timezone
+            # other than UTC to UTC. Thus, both the time read by Fiona and the expected value are first
+            # converted to UTC before being compared.
+
+            # Milliseconds
+            if _driver_supports_milliseconds(driver):
+                y, m, d, hh, mm, ss, ms, tz = parse_time(val)
+                if tz is not None:
+                    tz = TZ(tz)
+                val_d = datetime.time(hh, mm, ss, ms, tz)
+                return compare_times_utc(val_d, val_exp)
+            else:
+                # No Milliseconds
+                y, m, d, hh, mm, ss, ms, tz = parse_time(val)
+                if tz is not None:
+                    tz = TZ(tz)
+                val_d = datetime.time(hh, mm, ss, ms, tz)
+                return compare_times_utc(val_d, val_exp.replace(microsecond=0))
+        return False
+
+    schema = get_schema(driver, field_type)
+    path = str(tmpdir.join(get_temp_filename(driver)))
+    values_in, values_exp = zip(*generate_testdata(field_type, driver))
+    records = get_records(driver, values_in)
+
+    with fiona.open(path, 'w',
+                    driver=driver,
+                    schema=schema) as c:
+        c.writerecords(records)
+
+    with fiona.open(path, 'r') as c:
+        assert get_schema_field(driver, c.schema) == field_type
+        items = [get_field(driver, f) for f in c]
+        assert len(items) == len(values_in)
+        for val, val_exp in zip(items, values_exp):
+            assert _validate(val, val_exp, field_type, driver), \
+                "{} does not match {}".format(val, val_exp.isoformat())
+
+
+@pytest.mark.parametrize("driver, field_type", test_cases_datefield_to_str)
+def test_datefield_driver_converts_to_string(tmpdir, driver, field_type):
+    """
+    Test handling of date, time, datetime for drivers that convert these types to string.
+
+    As the formatting can be arbitrary, we only test whether the elements of a date / datetime / time object
+    are included in the string, e.g. for the PCIDSK driver whether hour 22 from datetime.time(22, 49, 5) is in
+    '0000/00/00 22:49:05'.
+
+    """
+
+    def _validate(val, val_exp, field_type, driver):
+
+        if field_type == 'date':
+            if (str(val_exp.year) in val and
+                    str(val_exp.month) in val and
+                    str(val_exp.day) in val):
+                return True
+        elif field_type == 'datetime':
+
+            if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None:
+                val_exp = (val_exp - val_exp.utcoffset()).replace(tzinfo=None)
+
+            # datetime fields can, depending on the driver, support:
+            # - Timezones
+            # - Milliseconds, respectively Microseconds
+
+            # No timezone
+            if val_exp.utcoffset() is None:
+                # No Milliseconds
+                if not _driver_supports_milliseconds(driver):
+                    if (str(val_exp.year) in val and
+                            str(val_exp.month) in val and
+                            str(val_exp.day) in val and
+                            str(val_exp.hour) in val and
+                            str(val_exp.minute) in val and
+                            str(val_exp.second) in val):
+                        return True
+                else:
+                    # Microseconds
+                    if (str(val_exp.year) in val and
+                            str(val_exp.month) in val and
+                            str(val_exp.day) in val and
+                            str(val_exp.hour) in val and
+                            str(val_exp.minute) in val and
+                            str(val_exp.second) in val and
+                            str(val_exp.microsecond) in val):
+                        return True
+                    # Milliseconds
+                    elif (str(val_exp.year) in val and
+                          str(val_exp.month) in val and
+                          str(val_exp.day) in val and
+                          str(val_exp.hour) in val and
+                          str(val_exp.minute) in val and
+                          str(val_exp.second) in val and
+                          str(int(val_exp.microsecond / 1000)) in val):
+                        return True
+            # With timezone
+            else:
+                sign, hours, minutes = get_tz_offset(val_exp)
+                if minutes > 0:
+                    tz = "{sign}{hours:02d}{minutes:02d}".format(sign=sign,
+                                                                 hours=int(hours),
+                                                                 minutes=int(minutes))
+                else:
+                    tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours))
+                # No Milliseconds
+                if not _driver_supports_milliseconds(driver):
+                    if (str(val_exp.year) in val and
+                            str(val_exp.month) in val and
+                            str(val_exp.day) in val and
+                            str(val_exp.hour) in val and
+                            str(val_exp.minute) in val and
+                            str(val_exp.second) in val and
+                            tz in val):
+                        return True
+                else:
+                    # Microseconds
+                    if 
(str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(val_exp.microsecond) in val and + tz in val): + return True + # Milliseconds + elif (str(val_exp.year) in val and + str(val_exp.month) in val and + str(val_exp.day) in val and + str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(int(val_exp.microsecond / 1000)) in val and + tz in val): + return True + + elif field_type == 'time': + + # time fields can, depending on the driver, support: + # - Timezones + # - Milliseconds, respectively Microseconds + + if not _driver_supports_timezones(driver, field_type) and val_exp.utcoffset() is not None: + val_exp = convert_time_to_utc(val_exp) + + # No timezone + if val_exp.utcoffset() is None: + # No Milliseconds + if not _driver_supports_milliseconds(driver): + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val): + return True + else: + # Microseconds + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(val_exp.microsecond) in val): + return True + # Milliseconds + elif (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(int(val_exp.microsecond / 1000)) in val): + return True + # With timezone + else: + + sign, hours, minutes = get_tz_offset(val_exp) + if minutes > 0: + tz = "{sign}{hours:02d}{minutes:02d}".format(sign=sign, + hours=int(hours), + minutes=int(minutes)) + else: + tz = "{sign}{hours:02d}".format(sign=sign, hours=int(hours)) + # No Milliseconds + if not _driver_supports_milliseconds(driver): + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + tz in val): + return True + else: + # Microseconds + if (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(val_exp.microsecond) in val and + tz in val): + return True + # Milliseconds + elif (str(val_exp.hour) in val and + str(val_exp.minute) in val and + str(val_exp.second) in val and + str(int(val_exp.microsecond / 1000)) in val + and tz in val): + return True + return False + + schema = get_schema(driver, field_type) + path = str(tmpdir.join(get_temp_filename(driver))) + values_in, values_exp = zip(*generate_testdata(field_type, driver)) + records = get_records(driver, values_exp) + + with pytest.warns(UserWarning) as record: + with fiona.open(path, 'w', + driver=driver, + schema=schema) as c: + c.writerecords(records) + assert len(record) == 1 + assert "silently converts" in record[0].message.args[0] + + with fiona.open(path, 'r') as c: + assert get_schema_field(driver, c.schema) == 'str' + items = [get_field(driver, f) for f in c] + assert len(items) == len(values_in) + for val, val_exp in zip(items, values_exp): + assert _validate(val, val_exp, field_type, driver), \ + "{} does not match {}".format(val, val_exp.isoformat()) + + +@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning') +@pytest.mark.parametrize("driver,field_type", test_cases_datefield + test_cases_datefield_to_str) +def test_datefield_null(tmpdir, driver, field_type): + """ + Test handling of null values for date, time, datetime types for write capable drivers + """ + + def _validate(val, val_exp, field_type, driver): + if (driver == 'MapInfo File' and field_type == 'time' and + calc_gdal_version_num(2, 0, 0) <= 
get_gdal_version_num() < calc_gdal_version_num(3, 1, 1)):
+            return val == '00:00:00'
+        if val is None or val == '':
+            return True
+        return False
+
+    schema = get_schema(driver, field_type)
+    path = str(tmpdir.join(get_temp_filename(driver)))
+    values_in = [None]
+    records = get_records(driver, values_in)
+
+    with fiona.open(path, 'w',
+                    driver=driver,
+                    schema=schema) as c:
+        c.writerecords(records)
+
+    with fiona.open(path, 'r') as c:
+        items = [get_field(driver, f) for f in c]
+        assert len(items) == 1
+
+        assert _validate(items[0], None, field_type, driver), \
+            "{} does not match {}".format(items[0], None)
+
+
+@pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported)
+def test_datetime_field_unsupported(tmpdir, driver, field_type):
+    """ Test if DriverSupportError is raised for unsupported field_types"""
+    schema = get_schema(driver, field_type)
+    path = str(tmpdir.join(get_temp_filename(driver)))
+    values_in, values_out = zip(*generate_testdata(field_type, driver))
+    records = get_records(driver, values_in)
+
+    with pytest.raises(DriverSupportError):
+        with fiona.open(path, 'w',
+                        driver=driver,
+                        schema=schema) as c:
+            c.writerecords(records)
+
+
+@pytest.mark.parametrize("driver, field_type", test_cases_datefield_not_supported)
+def test_datetime_field_type_marked_not_supported_is_not_supported(tmpdir, driver, field_type, monkeypatch):
+    """ Test if a date/datetime/time field type marked as not supported is really not supported
+
+    Warning: Success of this test does not necessarily mean that a field type is not supported. E.g. errors can
+    occur due to special schema requirements of drivers. This test only covers the standard case.
+
+    """
+
+    if driver == "BNA" and gdal_version < GDALVersion(2, 0):
+        # BNA driver segfaults with gdal 1.11
+        return
+
+    monkeypatch.delitem(fiona.drvsupport._driver_field_type_unsupported[field_type], driver)
+
+    schema = get_schema(driver, field_type)
+    path = str(tmpdir.join(get_temp_filename(driver)))
+    values_in, values_out = zip(*generate_testdata(field_type, driver))
+    records = get_records(driver, values_in)
+
+    is_good = True
+    try:
+        with fiona.open(path, 'w',
+                        driver=driver,
+                        schema=schema) as c:
+            c.writerecords(records)
+
+        with fiona.open(path, 'r') as c:
+            if not get_schema_field(driver, c.schema) == field_type:
+                is_good = False
+            items = [get_field(driver, f) for f in c]
+            for val_in, val_out in zip(items, values_out):
+                if not val_in == val_out:
+                    is_good = False
+    except Exception:
+        is_good = False
+    assert not is_good
+
+
+def generate_tostr_testcases():
+    """ Flatten _driver_converts_to_str to a list of (field_type, driver) tuples"""
+    cases = []
+    for field_type in _driver_converts_to_str:
+        for driver in _driver_converts_to_str[field_type]:
+            driver_supported = driver in supported_drivers
+            driver_can_write = (driver not in driver_mode_mingdal['w'] or
+                                gdal_version >= GDALVersion(*driver_mode_mingdal['w'][driver][:2]))
+            field_supported = _driver_supports_field(driver, field_type)
+            converts_to_str = _driver_converts_field_type_silently_to_str(driver, field_type)
+            if driver_supported and driver_can_write and converts_to_str and field_supported:
+                cases.append((field_type, driver))
+    return cases
+
+
+@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning')
+@pytest.mark.parametrize("driver,field_type", test_cases_datefield_to_str)
+def test_driver_marked_as_silently_converts_to_str_converts_silently_to_str(tmpdir, driver, field_type, monkeypatch):
+    """ Test if a driver and field_type is 
+
+
+@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning')
+@pytest.mark.parametrize("driver,field_type", test_cases_datefield_to_str)
+def test_driver_marked_as_silently_converts_to_str_converts_silently_to_str(tmpdir, driver, field_type, monkeypatch):
+    """ Test that a driver/field_type combination marked in fiona.drvsupport.driver_converts_to_str
+    really silently converts to str
+
+    If this test fails, consider replacing the respective None value in
+    fiona.drvsupport.driver_converts_to_str with a GDALVersion(major, minor) value.
+    """
+
+    monkeypatch.delitem(fiona.drvsupport._driver_converts_to_str[field_type], driver)
+
+    schema = get_schema(driver, field_type)
+    path = str(tmpdir.join(get_temp_filename(driver)))
+    values_in, values_out = zip(*generate_testdata(field_type, driver))
+    records = get_records(driver, values_in)
+
+    with fiona.open(path, 'w',
+                    driver=driver,
+                    schema=schema) as c:
+        c.writerecords(records)
+
+    with fiona.open(path, 'r') as c:
+        assert get_schema_field(driver, c.schema) == 'str'
+
+
+@pytest.mark.filterwarnings('ignore:.*driver silently converts *:UserWarning')
+@pytest.mark.parametrize("driver,field_type", [(driver, field_type) for driver, field_type in
+                                               test_cases_datefield + test_cases_datefield_to_str
+                                               if not field_type == 'date'])
+def test_no_unknown_timezone(tmpdir, driver, field_type):
+    """ Some drivers do not support unknown timezones (TZFlag=0) and silently convert datetimes to UTC"""
+
+    schema = get_schema(driver, field_type)
+    path = str(tmpdir.join(get_temp_filename(driver)))
+
+    if field_type == 'datetime':
+        values_in = ['2020-03-24T16:08:40']
+    elif field_type == 'time':
+        values_in = ['16:08:40']
+    records = get_records(driver, values_in)
+
+    with fiona.open(path, 'w',
+                    driver=driver,
+                    schema=schema) as c:
+        c.writerecords(records)
+
+    with fiona.open(path, 'r') as c:
+        items = [get_field(driver, f) for f in c]
+        assert len(items) == 1
+
+        if _driver_supports_unknown_timezones(driver, field_type):
+            assert "+" not in items[0], "{} contains a timezone".format(items[0])
         else:
-            assert schema["properties"]["date"] == "str"
-            assert features[0]["properties"]["date"] == "2018/03/25"
-            assert features[1]["properties"]["date"] is None
-
-    def test_mapinfo(self):
-        driver = "MapInfo File"
-        schema, features = self.write_data(driver)
-
-        assert schema["properties"]["date"] == "date"
-        assert features[0]["properties"]["date"] == DATE_EXAMPLE
-        assert features[1]["properties"]["date"] is None
-
-
-class TestDatetimeFieldSupport:
-    def write_data(self, driver):
-        filename = DRIVER_FILENAME[driver]
-        temp_dir = tempfile.mkdtemp()
-        path = os.path.join(temp_dir, filename)
-        schema = {
-            "geometry": GEOMETRY_TYPE,
-            "properties": {
-                "datetime": "datetime",
-            }
-        }
-        records = [
-            {
-                "geometry": GEOMETRY_EXAMPLE,
-                "properties": {
-                    "datetime": DATETIME_EXAMPLE,
-                }
-            },
-            {
-                "geometry": GEOMETRY_EXAMPLE,
-                "properties": {
-                    "datetime": None,
-                }
-            },
-        ]
-        with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection:
-            collection.writerecords(records)
-
-        with fiona.Env(), fiona.open(path, "r") as collection:
-            schema = collection.schema
-            features = list(collection)
-
-        shutil.rmtree(temp_dir)
-
-        return schema, features
-
-    def test_shapefile(self):
-        # datetime is silently converted to date
-        driver = "ESRI Shapefile"
-
-        with pytest.raises(DriverSupportError):
-            schema, features = self.write_data(driver)
-
-        # assert schema["properties"]["datetime"] == "date"
-        # assert features[0]["properties"]["datetime"] == "2018-03-25"
-        # assert features[1]["properties"]["datetime"] is None
-
-    @requires_gpkg
-    def test_gpkg(self):
-        # GDAL 1: datetime silently downgraded to date
-        driver = "GPKG"
-
-        if GDAL_MAJOR_VER >= 2:
-            schema, features = self.write_data(driver)
schema["properties"]["datetime"] == "datetime" - assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE - assert features[1]["properties"]["datetime"] is None - else: - with pytest.raises(DriverSupportError): - schema, features = self.write_data(driver) - - def test_geojson(self): - # GDAL 1: datetime silently converted to string - # GDAL 1: date string format uses / instead of - - driver = "GeoJSON" - schema, features = self.write_data(driver) - - if GDAL_MAJOR_VER >= 2: - assert schema["properties"]["datetime"] == "datetime" - assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE - else: - assert schema["properties"]["datetime"] == "str" - assert features[0]["properties"]["datetime"] == "2018/03/25 22:49:05" - assert features[1]["properties"]["datetime"] is None - - def test_mapinfo(self): - driver = "MapInfo File" - schema, features = self.write_data(driver) - - assert schema["properties"]["datetime"] == "datetime" - assert features[0]["properties"]["datetime"] == DATETIME_EXAMPLE - assert features[1]["properties"]["datetime"] is None - - -class TestTimeFieldSupport: - def write_data(self, driver): - filename = DRIVER_FILENAME[driver] - temp_dir = tempfile.mkdtemp() - path = os.path.join(temp_dir, filename) - schema = { - "geometry": GEOMETRY_TYPE, - "properties": { - "time": "time", - } - } - records = [ - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "time": TIME_EXAMPLE, - } - }, - { - "geometry": GEOMETRY_EXAMPLE, - "properties": { - "time": None, - } - }, - ] - with fiona.Env(), fiona.open(path, "w", driver=driver, schema=schema) as collection: - collection.writerecords(records) - - with fiona.Env(), fiona.open(path, "r") as collection: - schema = collection.schema - features = list(collection) - - shutil.rmtree(temp_dir) - - return schema, features - - def test_shapefile(self): - # no support for time fields - driver = "ESRI Shapefile" - with pytest.raises(DriverSupportError): - self.write_data(driver) - - @requires_gpkg - def test_gpkg(self): - # GDAL 2: time field is silently converted to string - # GDAL 1: time field dropped completely - driver = "GPKG" - - with pytest.raises(DriverSupportError): - schema, features = self.write_data(driver) - - # if GDAL_MAJOR_VER >= 2: - # assert schema["properties"]["time"] == "str" - # assert features[0]["properties"]["time"] == TIME_EXAMPLE - # assert features[1]["properties"]["time"] is None - # else: - # assert "time" not in schema["properties"] - - def test_geojson(self): - # GDAL 1: time field silently converted to string - driver = "GeoJSON" - schema, features = self.write_data(driver) - - if GDAL_MAJOR_VER >= 2: - assert schema["properties"]["time"] == "time" - else: - assert schema["properties"]["time"] == "str" - assert features[0]["properties"]["time"] == TIME_EXAMPLE - assert features[1]["properties"]["time"] is None - - def test_mapinfo(self): - # GDAL 2: null time is converted to 00:00:00 (regression?) 
- driver = "MapInfo File" - schema, features = self.write_data(driver) - - assert schema["properties"]["time"] == "time" - assert features[0]["properties"]["time"] == TIME_EXAMPLE - if GDAL_MAJOR_VER >= 2: - assert features[1]["properties"]["time"] == "00:00:00" - else: - assert features[1]["properties"]["time"] is None + assert "+" in items[0], "{} contains no timezone".format(items[0]) + + +def test_read_timezone_geojson(path_test_tz_geojson): + """Test if timezones are read correctly""" + with fiona.open(path_test_tz_geojson) as c: + items = list(c) + assert items[0]['properties']['test'] == '2015-04-22T00:00:00+07:00' diff --git a/tests/test_drvsupport.py b/tests/test_drvsupport.py index 6aea364b3..4e792fa07 100644 --- a/tests/test_drvsupport.py +++ b/tests/test_drvsupport.py @@ -1,114 +1,11 @@ """Tests of driver support""" import pytest - from .conftest import requires_gdal24, get_temp_filename from fiona.drvsupport import supported_drivers, driver_mode_mingdal import fiona.drvsupport from fiona.env import GDALVersion from fiona.errors import DriverError -from collections import OrderedDict - -# Add drivers to blacklist while testing write or append -blacklist_write_drivers = {} -blacklist_append_drivers = {} - - -def get_schema(driver): - """ - Generate schema for each driver - """ - schemas = { - 'GPX': {'properties': OrderedDict([('ele', 'float'), - ('time', 'datetime')]), - 'geometry': 'Point'}, - 'GPSTrackMaker': {'properties': OrderedDict([]), - 'geometry': 'Point'}, - 'DXF': {'properties': OrderedDict( - [('Layer', 'str'), - ('SubClasses', 'str'), - ('Linetype', 'str'), - ('EntityHandle', 'str'), - ('Text', 'str')]), - 'geometry': 'Point'}, - 'CSV': {'properties': OrderedDict([('ele', 'float')]), - 'geometry': None}, - 'DGN': {'properties': OrderedDict([]), - 'geometry': 'LineString'} - } - default_schema = {'geometry': 'LineString', - 'properties': [('title', 'str')]} - return schemas.get(driver, default_schema) - - -def get_record1(driver): - """ - Generate first record to write depending on driver - """ - records = { - 'GPX': {'properties': OrderedDict([('ele', 386.3), - ('time', '2020-03-24T16:08:40')]), - 'geometry': {'type': 'Point', 'coordinates': (8.306711, 47.475623)}}, - 'GPSTrackMaker': {'properties': OrderedDict([]), - 'geometry': {'type': 'Point', 'coordinates': (8.306711, 47.475623)}}, - 'DXF': {'properties': OrderedDict( - [('Layer', '0'), - ('SubClasses', 'AcDbEntity:AcDbPoint'), - ('Linetype', None), - ('EntityHandle', '20000'), - ('Text', None)]), - 'geometry': {'type': 'Point', 'coordinates': (8.306711, 47.475623)}}, - 'CSV': {'properties': OrderedDict([('ele', 386.3)]), - 'geometry': None}, - 'DGN': {'properties': OrderedDict( - []), - 'geometry': {'type': 'LineString', 'coordinates': [ - (1.0, 0.0), (0.0, 0.0)]}} - } - - default_record = {'geometry': {'type': 'LineString', 'coordinates': [ - (1.0, 0.0), (0.0, 0.0)]}, 'properties': {'title': 'One'}} - - return records.get(driver, default_record) - - -def get_record2(driver): - """ - Generate second record to write depending on driver - """ - records = { - 'GPX': {'properties': OrderedDict([('ele', 386.3), - ('time', '2020-03-24T16:19:14')]), - 'geometry': {'type': 'Point', 'coordinates': (8.307451, 47.474996)}}, - 'GPSTrackMaker': {'properties': OrderedDict([]), - 'geometry': {'type': 'Point', 'coordinates': (8.307451, 47.474996)}}, - 'DXF': {'properties': OrderedDict( - [('Layer', '0'), - ('SubClasses', 'AcDbEntity:AcDbPoint'), - ('Linetype', None), - ('EntityHandle', '20000'), - ('Text', None)]), - 
-            'geometry': {'type': 'Point', 'coordinates': (8.307451, 47.474996)}},
-        'CSV': {'properties': OrderedDict([('ele', 386.8)]),
-                'geometry': None},
-        'DGN': {'properties': OrderedDict(
-            [('Type', 3),
-             ('Level', 0),
-             ('GraphicGroup', 0),
-             ('ColorIndex', 0),
-             ('Weight', 0),
-             ('Style', 0),
-             ('EntityNum', None),
-             ('MSLink', None),
-             ('Text', None)]),
-            'geometry': {'type': 'LineString', 'coordinates': [
-                (2.0, 0.0), (0.0, 0.0)]}}
-    }
-
-    default_record = {'geometry': {'type': 'LineString', 'coordinates': [
-        (2.0, 0.0), (0.0, 0.0)]}, 'properties': {'title': 'Two'}}
-
-    return records.get(driver, default_record)
 
 
 @requires_gdal24
@@ -118,9 +15,8 @@ def test_geojsonseq(format):
     assert format in fiona.drvsupport.supported_drivers.keys()
 
 
-@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'w' in raw
-                                    and driver not in blacklist_write_drivers])
-def test_write_or_driver_error(tmpdir, driver):
+@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'w' in raw])
+def test_write_or_driver_error(tmpdir, driver, testdata_generator):
     """
     Test if write mode works.
 
@@ -130,6 +26,7 @@ def test_write_or_driver_error(tmpdir, driver):
         # BNA driver segfaults with gdal 1.11
         return
 
+    schema, crs, records1, _, test_equal = testdata_generator(driver, range(0, 10), [])
     path = str(tmpdir.join(get_temp_filename(driver)))
 
     if driver in driver_mode_mingdal['w'] and GDALVersion.runtime() < GDALVersion(
@@ -139,27 +36,31 @@ def test_write_or_driver_error(tmpdir, driver):
         with pytest.raises(DriverError):
             with fiona.open(path, 'w',
                             driver=driver,
-                            schema=get_schema(driver)) as c:
-                c.write(get_record1(driver))
+                            crs=crs,
+                            schema=schema) as c:
+                c.writerecords(records1)
     else:
         # Test if we can write
         with fiona.open(path, 'w',
                         driver=driver,
-                        schema=get_schema(driver)) as c:
+                        crs=crs,
+                        schema=schema) as c:
 
-            c.write(get_record1(driver))
+            c.writerecords(records1)
 
         with fiona.open(path) as c:
            assert c.driver == driver
-            assert len([f for f in c]) == 1
+            items = list(c)
+            assert len(items) == len(records1)
+            for val_in, val_out in zip(records1, items):
+                assert test_equal(driver, val_in, val_out), "in: {val_in}, out: {val_out}".format(val_in=val_in,
+                                                                                                  val_out=val_out)
 
 
-@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys()
-                                    if driver not in blacklist_append_drivers
-                                    and driver in supported_drivers])
-def test_write_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, monkeypatch):
+@pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['w'].keys()])
+def test_write_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, testdata_generator, monkeypatch):
     """
     Test if driver really can't write for gdal < driver_mode_mingdal
 
@@ -171,6 +72,7 @@ def test_write_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, monkeypat
         # BNA driver segfaults with gdal 1.11
         return
 
+    schema, crs, records1, _, test_equal = testdata_generator(driver, range(0, 10), [])
     path = str(tmpdir.join(get_temp_filename(driver)))
 
     if driver in driver_mode_mingdal['w'] and GDALVersion.runtime() < GDALVersion(
@@ -180,17 +82,17 @@ def test_write_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, monkeypat
         with pytest.raises(Exception):
             with fiona.open(path, 'w',
                             driver=driver,
-                            schema=get_schema(driver)) as c:
-                c.write(get_record1(driver))
+                            crs=crs,
+                            schema=schema) as c:
+                c.writerecords(records1)
 
 
-@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'a' in raw
-                                    and driver not in blacklist_append_drivers])
-def test_append_or_driver_error(tmpdir, driver):
+@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if 'a' in raw])
+def test_append_or_driver_error(tmpdir, testdata_generator, driver):
     """
     Test if driver supports append mode.
-    
+
     Some driver only allow a specific schema. These drivers can be excluded by adding them to blacklist_append_drivers.
-    
+
     """
 
    if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0):
@@ -198,6 +100,7 @@ def test_append_or_driver_error(tmpdir, driver):
         return
 
     path = str(tmpdir.join(get_temp_filename(driver)))
+    schema, crs, records1, records2, test_equal = testdata_generator(driver, range(0, 5), range(5, 10))
 
     # If driver is not able to write, we cannot test append
     if driver in driver_mode_mingdal['w'] and GDALVersion.runtime() < GDALVersion(
@@ -207,9 +110,10 @@ def test_append_or_driver_error(tmpdir, driver):
     # Create test file to append to
     with fiona.open(path, 'w',
                     driver=driver,
-                    schema=get_schema(driver)) as c:
+                    crs=crs,
+                    schema=schema) as c:
 
-        c.write(get_record1(driver))
+        c.writerecords(records1)
 
     if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion(
             *driver_mode_mingdal['a'][driver][:2]):
@@ -218,27 +122,28 @@ def test_append_or_driver_error(tmpdir, driver):
         with pytest.raises(DriverError):
             with fiona.open(path, 'a', driver=driver) as c:
-                c.write(get_record2(driver))
+                c.writerecords(records2)
     else:
         # Test if we can append
         with fiona.open(path, 'a', driver=driver) as c:
-            c.write(get_record2(driver))
+            c.writerecords(records2)
 
         with fiona.open(path) as c:
             assert c.driver == driver
-            assert len([f for f in c]) == 2
+            items = list(c)
+            assert len(items) == len(records1) + len(records2)
+            for val_in, val_out in zip(records1 + records2, items):
+                assert test_equal(driver, val_in, val_out), "in: {val_in}, out: {val_out}".format(val_in=val_in,
+                                                                                                  val_out=val_out)
 
 
 @pytest.mark.parametrize('driver', [driver for driver in driver_mode_mingdal['a'].keys()
-                                    if driver not in blacklist_append_drivers
-                                    and driver in supported_drivers])
-def test_append_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, monkeypatch):
+                                    if driver in supported_drivers])
+def test_append_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, testdata_generator, monkeypatch):
     """
     Test if driver supports append mode.
-    Some driver only allow a specific schema. These drivers can be excluded by adding them to blacklist_append_drivers.
-    If this test fails, it should be considered to update driver_mode_mingdal in drvsupport.py.
""" @@ -248,6 +153,7 @@ def test_append_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, monkeypa return path = str(tmpdir.join(get_temp_filename(driver))) + schema, crs, records1, records2, test_equal = testdata_generator(driver, range(0, 5), range(5, 10)) # If driver is not able to write, we cannot test append if driver in driver_mode_mingdal['w'] and GDALVersion.runtime() < GDALVersion( @@ -257,9 +163,10 @@ def test_append_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, monkeypa # Create test file to append to with fiona.open(path, 'w', driver=driver, - schema=get_schema(driver)) as c: + crs=crs, + schema=schema) as c: - c.write(get_record1(driver)) + c.writerecords(records1) if driver in driver_mode_mingdal['a'] and GDALVersion.runtime() < GDALVersion( *driver_mode_mingdal['a'][driver][:2]): @@ -270,23 +177,26 @@ def test_append_does_not_work_when_gdal_smaller_mingdal(tmpdir, driver, monkeypa with pytest.raises(Exception): with fiona.open(path, 'a', driver=driver) as c: - c.write(get_record2(driver)) + c.writerecords(records2) with fiona.open(path) as c: assert c.driver == driver - assert len([f for f in c]) == 2 + items = list(c) + assert len(items) == len(records1) + len(records2) + for val_in, val_out in zip(records1 + records2, items): + assert test_equal(driver, val_in, val_out) -@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if - raw == 'r' and driver not in blacklist_write_drivers]) -def test_no_write_driver_cannot_write(tmpdir, driver, monkeypatch): +@pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if raw == 'r']) +def test_no_write_driver_cannot_write(tmpdir, driver, testdata_generator, monkeypatch): """Test if read only driver cannot write - - If this test fails, it should be considered to enable write support for the respective driver in drvsupport.py. - + + If this test fails, it should be considered to enable write support for the respective driver in drvsupport.py. + """ monkeypatch.setitem(fiona.drvsupport.supported_drivers, driver, 'rw') + schema, crs, records1, _, test_equal = testdata_generator(driver, range(0, 5), []) if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): # BNA driver segfaults with gdal 1.11 @@ -297,15 +207,16 @@ def test_no_write_driver_cannot_write(tmpdir, driver, monkeypatch): with pytest.raises(Exception): with fiona.open(path, 'w', driver=driver, - schema=get_schema(driver)) as c: - c.write(get_record1(driver)) + crs=crs, + schema=schema) as c: + c.writerecords(records1) @pytest.mark.parametrize('driver', [driver for driver, raw in supported_drivers.items() if - 'w' in raw and 'a' not in raw and driver not in blacklist_append_drivers]) -def test_no_append_driver_cannot_append(tmpdir, driver, monkeypatch): + 'w' in raw and 'a' not in raw]) +def test_no_append_driver_cannot_append(tmpdir, driver, testdata_generator, monkeypatch): """ - Test if a driver that supports write cannot also append + Test if a driver that supports write and not append cannot also append If this test fails, it should be considered to enable append support for the respective driver in drvsupport.py. 
@@ -318,6 +229,7 @@ def test_no_append_driver_cannot_append(tmpdir, driver, monkeypatch):
         return
 
     path = str(tmpdir.join(get_temp_filename(driver)))
+    schema, crs, records1, records2, test_equal = testdata_generator(driver, range(0, 5), range(5, 10))
 
     # If driver is not able to write, we cannot test append
     if driver in driver_mode_mingdal['w'] and GDALVersion.runtime() < GDALVersion(
@@ -327,18 +239,27 @@ def test_no_append_driver_cannot_append(tmpdir, driver, monkeypatch):
     # Create test file to append to
     with fiona.open(path, 'w',
                     driver=driver,
-                    schema=get_schema(driver)) as c:
+                    crs=crs,
+                    schema=schema) as c:
 
-        c.write(get_record1(driver))
+        c.writerecords(records1)
 
-    with pytest.raises(Exception):
+    is_good = True
+    try:
         with fiona.open(path, 'a', driver=driver) as c:
-            c.write(get_record2(driver))
+            c.writerecords(records2)
 
         with fiona.open(path) as c:
             assert c.driver == driver
-            assert len([f for f in c]) == 2
+            items = list(c)
+            is_good = is_good and len(items) == len(records1) + len(records2)
+            for val_in, val_out in zip(records1 + records2, items):
+                is_good = is_good and test_equal(driver, val_in, val_out)
+    except Exception:
+        is_good = False
+
+    assert not is_good
 
 
 def test_mingdal_drivers_are_supported():
diff --git a/tests/test_rfc3339.py b/tests/test_rfc3339.py
index 5e01a64ab..ff8ed8a1a 100644
--- a/tests/test_rfc3339.py
+++ b/tests/test_rfc3339.py
@@ -12,7 +12,7 @@ class TestDateParse(object):
 
     def test_yyyymmdd(self):
-        assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0)
+        assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0, None)
 
     def test_error(self):
         with pytest.raises(ValueError):
@@ -22,19 +22,22 @@ def test_error(self):
 
 class TestTimeParse(object):
 
     def test_hhmmss(self):
-        assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0)
+        assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0, None)
 
     def test_hhmm(self):
-        assert parse_time("10:11") == (0, 0, 0, 10, 11, 0, 0.0)
+        assert parse_time("10:11") == (0, 0, 0, 10, 11, 0, 0.0, None)
 
     def test_hhmmssff(self):
-        assert parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42*1000000)
+        assert parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42*1000000, None)
 
     def test_hhmmssz(self):
-        assert parse_time("10:11:12Z") == (0, 0, 0, 10, 11, 12, 0.0)
+        assert parse_time("10:11:12Z") == (0, 0, 0, 10, 11, 12, 0.0, None)
 
     def test_hhmmssoff(self):
-        assert parse_time("10:11:12-01:00") == (0, 0, 0, 10, 11, 12, 0.0)
+        assert parse_time("10:11:12-01:30") == (0, 0, 0, 10, 11, 12, 0.0, -90)
+
+    def test_hhmmssoff2(self):
+        assert parse_time("10:11:12+01:30") == (0, 0, 0, 10, 11, 12, 0.0, 90)
 
     def test_error(self):
         with pytest.raises(ValueError):
@@ -46,7 +49,17 @@ class TestDatetimeParse(object):
 
     def test_yyyymmdd(self):
         assert (
             parse_datetime("2012-01-29T10:11:12") ==
-            (2012, 1, 29, 10, 11, 12, 0.0))
+            (2012, 1, 29, 10, 11, 12, 0.0, None))
+
+    def test_yyyymmddTZ(self):
+        assert (
+            parse_datetime("2012-01-29T10:11:12+01:30") ==
+            (2012, 1, 29, 10, 11, 12, 0.0, 90))
+
+    def test_yyyymmddTZ2(self):
+        assert (
+            parse_datetime("2012-01-29T10:11:12-01:30") ==
+            (2012, 1, 29, 10, 11, 12, 0.0, -90))
 
     def test_error(self):
         with pytest.raises(ValueError):
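The eighth tuple element introduced above is the parsed UTC offset in minutes, or None when the input carries no timezone. For orientation, a sketch (not part of the patch) of turning such a tuple into an aware datetime with the standard library:

    from datetime import datetime, timedelta, timezone

    # Tuple shape as asserted above: (..., microseconds, tz_offset_minutes).
    y, m, d, hh, mm, ss, us, tz_minutes = (2012, 1, 29, 10, 11, 12, 0.0, 90)
    tzinfo = timezone(timedelta(minutes=tz_minutes)) if tz_minutes is not None else None
    dt = datetime(y, m, d, hh, mm, ss, int(us), tzinfo=tzinfo)
    assert dt.isoformat() == "2012-01-29T10:11:12+01:30"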
diff --git a/tests/test_slice.py b/tests/test_slice.py
index 3ac8241c1..cb035068e 100644
--- a/tests/test_slice.py
+++ b/tests/test_slice.py
@@ -8,7 +8,7 @@ import fiona
 from fiona.errors import FionaDeprecationWarning
 from .conftest import get_temp_filename
-from fiona.drvsupport import supported_drivers, driver_mode_mingdal
+from fiona.drvsupport import supported_drivers, driver_mode_mingdal, _driver_supports_mode
 
 gdal_version = GDALVersion.runtime()
 
@@ -43,9 +43,8 @@ def test_collection_iterator_next(path_coutwildrnp_shp):
     assert v['id'] == '5'
 
 
-@pytest.fixture(scope="module", params=[driver for driver, raw in supported_drivers.items() if 'w' in raw
-                                        and (driver not in driver_mode_mingdal['w'] or
-                                             gdal_version >= GDALVersion(*driver_mode_mingdal['w'][driver][:2]))
+@pytest.fixture(scope="module", params=[driver for driver in supported_drivers if
+                                        _driver_supports_mode(driver, 'w')
                                         and driver not in {'DGN', 'MapInfo File', 'GPSTrackMaker', 'GPX', 'BNA', 'DXF', 'GML'}])
 def slice_dataset_path(request):