An exception was thrown from the Python worker. Please see the stack trace below.

Traceback (most recent call last):
  File "/usr/local/lib64/python3.7/site-packages/pendulum/parsing/__init__.py", line 132, in _parse
    text, dayfirst=options["day_first"], yearfirst=options["year_first"]
  File "/usr/local/lib/python3.7/site-packages/dateutil/parser/_parser.py", line 1368, in parse
    return DEFAULTPARSER.parse(timestr, **kwargs)
  File "/usr/local/lib/python3.7/site-packages/dateutil/parser/_parser.py", line 643, in parse
    raise ParserError("Unknown string format: %s", timestr)
dateutil.parser._parser.ParserError: Unknown string format: None

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/worker.py", line 604, in main
    process()
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/worker.py", line 596, in process
    serializer.dump_stream(out_iter, outfile)
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/serializers.py", line 211, in dump_stream
    self.serializer.dump_stream(self._batched(iterator), stream)
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/serializers.py", line 132, in dump_stream
    for obj in iterator:
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/serializers.py", line 200, in _batched
    for item in iterator:
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/worker.py", line 450, in mapper
    result = tuple(f(*[a[o] for o in arg_offsets]) for (arg_offsets, f) in udfs)
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/worker.py", line 450, in <genexpr>
    result = tuple(f(*[a[o] for o in arg_offsets]) for (arg_offsets, f) in udfs)
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/worker.py", line 83, in <lambda>
    return lambda *a: toInternal(f(*a))
  File "/mnt/yarn/usercache/livy/appcache/application_1639628500827_0063/container_1639628500827_0063_01_000068/pyspark.zip/pyspark/util.py", line 87, in wrapper
    return f(*args, **kwargs)
  File "", line 71, in udf_to_utc_timezone
  File "/usr/local/lib64/python3.7/site-packages/pendulum/parser.py", line 29, in parse
    return _parse(text, **options)
  File "/usr/local/lib64/python3.7/site-packages/pendulum/parser.py", line 45, in _parse
    parsed = base_parse(text, **options)
  File "/usr/local/lib64/python3.7/site-packages/pendulum/parsing/__init__.py", line 74, in parse
    return _normalize(_parse(text, **_options), **_options)
  File "/usr/local/lib64/python3.7/site-packages/pendulum/parsing/__init__.py", line 135, in _parse
    raise ParserError("Invalid date string: {}".format(text))
pendulum.parsing.exceptions.ParserError: Invalid date string: None
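
The trace shows the UDF udf_to_utc_timezone passing None to pendulum.parse, which rejects it ("Invalid date string: None"); in other words, null values in the timestamp column reach the UDF and crash the Python worker. Below is a minimal sketch of a null-safe variant. The function body, return type, and the column name "event_ts" are assumptions for illustration, not the original UDF; only the None guard before pendulum.parse is the point.

    # Sketch of a null-safe UDF, assuming the original takes a timestamp
    # string and returns an ISO-8601 string normalised to UTC.
    from typing import Optional

    import pendulum
    from pyspark.sql.functions import udf
    from pyspark.sql.types import StringType


    def to_utc_timezone(ts: Optional[str]) -> Optional[str]:
        # pendulum.parse(None) raises ParserError, so short-circuit on null
        # input instead of letting the worker die.
        if ts is None:
            return None
        return pendulum.parse(ts).in_timezone("UTC").to_iso8601_string()


    udf_to_utc_timezone = udf(to_utc_timezone, StringType())

    # Hypothetical usage on a column named "event_ts":
    # df = df.withColumn("event_ts_utc", udf_to_utc_timezone("event_ts"))

Alternatively, the nulls can be filtered or filled before the UDF runs (e.g. df.na.drop(subset=["event_ts"]) or df.na.fill(...)), which keeps the parsing code itself unchanged.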