How to use the py4j.protocol.register_input_converter function in py4j

To help you get started, we’ve selected a few py4j examples, based on popular ways the library is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github UCLA-VAST / blaze / spark-1.5.1 / python / pyspark / sql / types.py View on Github external
Date = JavaClass("java.sql.Date", gateway_client)
        return Date.valueOf(obj.strftime("%Y-%m-%d"))


class DatetimeConverter(object):
    """py4j input converter that turns datetime.datetime into java.sql.Timestamp.

    Registered ahead of the date converter because datetime is a subclass
    of date and would otherwise be captured by the wrong converter.
    """

    def can_convert(self, obj):
        """Return True only for datetime.datetime instances."""
        return isinstance(obj, datetime.datetime)

    def convert(self, obj, gateway_client):
        """Build an equivalent java.sql.Timestamp via the py4j gateway."""
        import calendar  # local import keeps the module's import surface unchanged
        Timestamp = JavaClass("java.sql.Timestamp", gateway_client)
        # Bug fix: tz-aware datetimes must be interpreted as UTC with
        # calendar.timegm; the old code always used time.mktime, which
        # applies the local timezone and shifts aware timestamps.
        seconds = (calendar.timegm(obj.utctimetuple()) if obj.tzinfo
                   else time.mktime(obj.timetuple()))
        t = Timestamp(int(seconds) * 1000)
        # setNanos keeps full microsecond precision; the old
        # `+ obj.microsecond // 1000` form truncated to milliseconds.
        t.setNanos(obj.microsecond * 1000)
        return t


# datetime is a subclass of date, we should register DatetimeConverter first
# (py4j consults input converters in order, so if DateConverter came first it
# would also match datetime instances and DatetimeConverter would never run).
register_input_converter(DatetimeConverter())
register_input_converter(DateConverter())


def _test():
    """Run this module's doctests against a throwaway local SparkContext."""
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import SQLContext

    globs = globals()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['sqlContext'] = SQLContext(sc)
    failure_count, test_count = doctest.testmod(
        globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        exit(-1)
github qubole / spark-on-lambda / python / pyspark / sql / types.py View on Github external
class DatetimeConverter(object):
    """py4j input converter mapping datetime.datetime to java.sql.Timestamp."""

    def can_convert(self, obj):
        # Only full datetimes are handled; plain dates go to DateConverter.
        return isinstance(obj, datetime.datetime)

    def convert(self, obj, gateway_client):
        Timestamp = JavaClass("java.sql.Timestamp", gateway_client)
        # tz-aware values are read as UTC; naive values use local time.
        if obj.tzinfo:
            seconds = calendar.timegm(obj.utctimetuple())
        else:
            seconds = time.mktime(obj.timetuple())
        ts = Timestamp(int(seconds) * 1000)
        # Carry the sub-second part at full microsecond precision.
        ts.setNanos(obj.microsecond * 1000)
        return ts

# datetime is a subclass of date, we should register DatetimeConverter first
# (converters are tried in registration order; reversing these would let the
# date converter capture datetime instances too).
register_input_converter(DatetimeConverter())
register_input_converter(DateConverter())


def _test():
    """Execute the module doctests with a local SparkContext/SQLContext."""
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import SQLContext

    globs = globals()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['sqlContext'] = SQLContext(sc)
    failure_count, test_count = doctest.testmod(
        globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        exit(-1)
github qubole / spark-on-lambda / python / pyspark / sql / types.py View on Github external
class DatetimeConverter(object):
    """py4j input converter mapping datetime.datetime to java.sql.Timestamp."""

    def can_convert(self, obj):
        # Only full datetimes are handled; plain dates go to DateConverter.
        return isinstance(obj, datetime.datetime)

    def convert(self, obj, gateway_client):
        Timestamp = JavaClass("java.sql.Timestamp", gateway_client)
        # tz-aware values are read as UTC; naive values use local time.
        if obj.tzinfo:
            seconds = calendar.timegm(obj.utctimetuple())
        else:
            seconds = time.mktime(obj.timetuple())
        ts = Timestamp(int(seconds) * 1000)
        # Carry the sub-second part at full microsecond precision.
        ts.setNanos(obj.microsecond * 1000)
        return ts

# datetime is a subclass of date, we should register DatetimeConverter first
# (converters are tried in registration order; reversing these would let the
# date converter capture datetime instances too).
register_input_converter(DatetimeConverter())
register_input_converter(DateConverter())


def _test():
    """Execute the module doctests with a local SparkContext/SQLContext."""
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import SQLContext

    globs = globals()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['sqlContext'] = SQLContext(sc)
    failure_count, test_count = doctest.testmod(
        globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        exit(-1)
github qubole / spark-on-lambda / python / pyspark / sql / types.py View on Github external
class DatetimeConverter(object):
    """py4j input converter mapping datetime.datetime to java.sql.Timestamp."""

    def can_convert(self, obj):
        # Only full datetimes are handled; plain dates go to DateConverter.
        return isinstance(obj, datetime.datetime)

    def convert(self, obj, gateway_client):
        Timestamp = JavaClass("java.sql.Timestamp", gateway_client)
        # tz-aware values are read as UTC; naive values use local time.
        if obj.tzinfo:
            seconds = calendar.timegm(obj.utctimetuple())
        else:
            seconds = time.mktime(obj.timetuple())
        ts = Timestamp(int(seconds) * 1000)
        # Carry the sub-second part at full microsecond precision.
        ts.setNanos(obj.microsecond * 1000)
        return ts

# datetime is a subclass of date, we should register DatetimeConverter first
# (converters are tried in registration order; reversing these would let the
# date converter capture datetime instances too).
register_input_converter(DatetimeConverter())
register_input_converter(DateConverter())


def _test():
    """Execute the module doctests under a local SparkContext/SparkSession."""
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import SparkSession

    globs = globals()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['spark'] = SparkSession.builder.getOrCreate()
    failure_count, test_count = doctest.testmod(
        globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        exit(-1)
github qubole / spark-on-lambda / python / pyspark / sql / types.py View on Github external
class DatetimeConverter(object):
    """py4j input converter mapping datetime.datetime to java.sql.Timestamp."""

    def can_convert(self, obj):
        # Only full datetimes are handled; plain dates go to DateConverter.
        return isinstance(obj, datetime.datetime)

    def convert(self, obj, gateway_client):
        Timestamp = JavaClass("java.sql.Timestamp", gateway_client)
        # tz-aware values are read as UTC; naive values use local time.
        if obj.tzinfo:
            seconds = calendar.timegm(obj.utctimetuple())
        else:
            seconds = time.mktime(obj.timetuple())
        ts = Timestamp(int(seconds) * 1000)
        # Carry the sub-second part at full microsecond precision.
        ts.setNanos(obj.microsecond * 1000)
        return ts

# datetime is a subclass of date, we should register DatetimeConverter first
# (converters are tried in registration order; reversing these would let the
# date converter capture datetime instances too).
register_input_converter(DatetimeConverter())
register_input_converter(DateConverter())


def _test():
    """Execute the module doctests under a local SparkContext/SparkSession."""
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import SparkSession

    globs = globals()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['spark'] = SparkSession.builder.getOrCreate()
    failure_count, test_count = doctest.testmod(
        globs=globs, optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        exit(-1)
github locationtech-labs / geopyspark / geopyspark / geotrellis / converters.py View on Github external
ScalaTemporalStrategy = JavaClass("geopyspark.geotrellis.SpaceTimePartitionStrategy", gateway_client)

        scala_time_unit = obj.time_unit.value

        if obj.time_resolution:
            scala_time_resolution = str(obj.time_resolution)
        else:
            scala_time_resolution = None

        return ScalaTemporalStrategy.apply(obj.num_partitions, obj.bits, scala_time_unit, scala_time_resolution)


# Register geopyspark's converters with prepend=True so py4j consults them
# before its default converters when translating Python arguments to Java.
register_input_converter(CellTypeConverter(), prepend=True)
register_input_converter(RasterizerOptionsConverter(), prepend=True)
register_input_converter(LayoutTypeConverter(), prepend=True)
register_input_converter(ResampleMethodConverter(), prepend=True)
register_input_converter(LayoutDefinitionConverter(), prepend=True)
register_input_converter(HashPartitionStrategyConverter(), prepend=True)
register_input_converter(SpatialPartitionStrategyConverter(), prepend=True)
register_input_converter(SpaceTimePartitionStrategyConverter(), prepend=True)
github bartdag / py4j / py4j-python / src / py4j / java_collections.py View on Github external
class MapConverter(object):
    """py4j input converter that copies Python mappings into java.util.HashMap."""

    def can_convert(self, object):
        # Duck-typed check: anything with keys() and item access is a mapping.
        return hasattr2(object, "keys") and hasattr2(object, "__getitem__")

    def convert(self, object, gateway_client):
        JHashMap = JavaClass("java.util.HashMap", gateway_client)
        converted = JHashMap()
        # Copy entry by entry through the mapping protocol.
        for key in object.keys():
            converted[key] = object[key]
        return converted

# Default py4j input converters: Python sets, mappings, and sequences are
# translated to their java.util counterparts before being sent to the JVM.
register_input_converter(SetConverter())
register_input_converter(MapConverter())
register_input_converter(ListConverter())

# Default output converters: JVM collection references coming back over the
# gateway are wrapped in Python-side proxy collections keyed by protocol type.
register_output_converter(
    proto.MAP_TYPE, lambda target_id, gateway_client:
    JavaMap(target_id, gateway_client))
register_output_converter(
    proto.LIST_TYPE, lambda target_id, gateway_client:
    JavaList(target_id, gateway_client))
register_output_converter(
    proto.ARRAY_TYPE, lambda target_id, gateway_client:
    JavaArray(target_id, gateway_client))
register_output_converter(
    proto.SET_TYPE, lambda target_id, gateway_client:
    JavaSet(target_id, gateway_client))
register_output_converter(
    proto.ITERATOR_TYPE, lambda target_id, gateway_client:
    JavaIterator(target_id, gateway_client))
github locationtech-labs / geopyspark / geopyspark / geotrellis / converters.py View on Github external
if obj.time_resolution:
            scala_time_resolution = str(obj.time_resolution)
        else:
            scala_time_resolution = None

        return ScalaTemporalStrategy.apply(obj.num_partitions, obj.bits, scala_time_unit, scala_time_resolution)


# Register geopyspark's converters with prepend=True so py4j consults them
# before its default converters when translating Python arguments to Java.
register_input_converter(CellTypeConverter(), prepend=True)
register_input_converter(RasterizerOptionsConverter(), prepend=True)
register_input_converter(LayoutTypeConverter(), prepend=True)
register_input_converter(ResampleMethodConverter(), prepend=True)
register_input_converter(LayoutDefinitionConverter(), prepend=True)
register_input_converter(HashPartitionStrategyConverter(), prepend=True)
register_input_converter(SpatialPartitionStrategyConverter(), prepend=True)
register_input_converter(SpaceTimePartitionStrategyConverter(), prepend=True)
github locationtech-labs / geopyspark / geopyspark / geotrellis / converters.py View on Github external
def convert(self, obj, gateway_client):
    """Translate a Python partition-strategy object into the Scala
    SpaceTimePartitionStrategy via the py4j gateway.

    NOTE(review): assumes obj exposes time_unit (an enum-like with .value),
    time_resolution, num_partitions, and bits — confirm against callers.
    """
    ScalaTemporalStrategy = JavaClass(
        "geopyspark.geotrellis.SpaceTimePartitionStrategy", gateway_client)
    scala_time_unit = obj.time_unit.value
    # A falsy time_resolution is passed through to Scala as None.
    scala_time_resolution = str(obj.time_resolution) if obj.time_resolution else None
    return ScalaTemporalStrategy.apply(
        obj.num_partitions, obj.bits, scala_time_unit, scala_time_resolution)


# Register geopyspark's converters with prepend=True so py4j consults them
# before its default converters when translating Python arguments to Java.
register_input_converter(CellTypeConverter(), prepend=True)
register_input_converter(RasterizerOptionsConverter(), prepend=True)
register_input_converter(LayoutTypeConverter(), prepend=True)
register_input_converter(ResampleMethodConverter(), prepend=True)
register_input_converter(LayoutDefinitionConverter(), prepend=True)
register_input_converter(HashPartitionStrategyConverter(), prepend=True)
register_input_converter(SpatialPartitionStrategyConverter(), prepend=True)
register_input_converter(SpaceTimePartitionStrategyConverter(), prepend=True)
github locationtech-labs / geopyspark / geopyspark / geotrellis / converters.py View on Github external
ScalaTemporalStrategy = JavaClass("geopyspark.geotrellis.SpaceTimePartitionStrategy", gateway_client)

        scala_time_unit = obj.time_unit.value

        if obj.time_resolution:
            scala_time_resolution = str(obj.time_resolution)
        else:
            scala_time_resolution = None

        return ScalaTemporalStrategy.apply(obj.num_partitions, obj.bits, scala_time_unit, scala_time_resolution)


# Register geopyspark's converters with prepend=True so py4j consults them
# before its default converters when translating Python arguments to Java.
register_input_converter(CellTypeConverter(), prepend=True)
register_input_converter(RasterizerOptionsConverter(), prepend=True)
register_input_converter(LayoutTypeConverter(), prepend=True)
register_input_converter(ResampleMethodConverter(), prepend=True)
register_input_converter(LayoutDefinitionConverter(), prepend=True)
register_input_converter(HashPartitionStrategyConverter(), prepend=True)
register_input_converter(SpatialPartitionStrategyConverter(), prepend=True)
register_input_converter(SpaceTimePartitionStrategyConverter(), prepend=True)