Tags: python, pandas, http-status-code-504

Got "504 Deadline Exceeded" in Jupyter Notebook (Python) with BigQuery


I am trying to get the result of a Google BigQuery query into a pandas DataFrame (in a Jupyter notebook).

But every time I try to run the query I get a `DeadlineExceeded: 504 Deadline Exceeded` error.

This happens not only for queries in my own BigQuery project but also for other projects. I have tried a lot of options to run the query, like the ones described here: https://cloud.google.com/bigquery/docs/bigquery-storage-python-pandas

Does anyone have an idea how to fix this?

Query:


    %load_ext google.cloud.bigquery

    %%bigquery tax_forms --use_bqstorage_api
    SELECT * FROM `bigquery-public-data.irs_990.irs_990_2012`
---------------------------------------------------------------------------
_MultiThreadedRendezvous                  Traceback (most recent call last)
~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\api_core\grpc_helpers.py in error_remapped_callable(*args, **kwargs)
    149             prefetch_first = getattr(callable_, "_prefetch_first_result_", True)
--> 150             return _StreamingResponseIterator(result, prefetch_first_result=prefetch_first)
    151         except grpc.RpcError as exc:

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\api_core\grpc_helpers.py in __init__(self, wrapped, prefetch_first_result)
     72             if prefetch_first_result:
---> 73                 self._stored_first_result = six.next(self._wrapped)
     74         except TypeError:

~\AppData\Local\Continuum\anaconda3\lib\site-packages\grpc\_channel.py in __next__(self)
    415     def __next__(self):
--> 416         return self._next()
    417 

~\AppData\Local\Continuum\anaconda3\lib\site-packages\grpc\_channel.py in _next(self)
    705                 elif self._state.code is not None:
--> 706                     raise self
    707 

_MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
    status = StatusCode.DEADLINE_EXCEEDED
    details = "Deadline Exceeded"
    debug_error_string = "{"created":"@1597838569.388000000","description":"Error received from peer ipv4:172.217.168.202:443","file":"src/core/lib/surface/call.cc","file_line":1062,"grpc_message":"Deadline Exceeded","grpc_status":4}"
>

The above exception was the direct cause of the following exception:

DeadlineExceeded                          Traceback (most recent call last)
<ipython-input-2-4fdaec7219df> in <module>
----> 1 get_ipython().run_cell_magic('bigquery', 'tax_forms --use_bqstorage_api', 'SELECT * FROM `bigquery-public-data.irs_990.irs_990_2012`\n')

~\AppData\Local\Continuum\anaconda3\lib\site-packages\IPython\core\interactiveshell.py in run_cell_magic(self, magic_name, line, cell)
   2357             with self.builtin_trap:
   2358                 args = (magic_arg_s, cell)
-> 2359                 result = fn(*args, **kwargs)
   2360             return result
   2361 

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery\magics.py in _cell_magic(line, query)
    589             )
    590         else:
--> 591             result = query_job.to_dataframe(bqstorage_client=bqstorage_client)
    592 
    593         if args.destination_var:

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery\job.py in to_dataframe(self, bqstorage_client, dtypes, progress_bar_type, create_bqstorage_client, date_as_object)
   3381             progress_bar_type=progress_bar_type,
   3382             create_bqstorage_client=create_bqstorage_client,
-> 3383             date_as_object=date_as_object,
   3384         )
   3385 

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery\table.py in to_dataframe(self, bqstorage_client, dtypes, progress_bar_type, create_bqstorage_client, date_as_object)
   1726                 progress_bar_type=progress_bar_type,
   1727                 bqstorage_client=bqstorage_client,
-> 1728                 create_bqstorage_client=create_bqstorage_client,
   1729             )
   1730 

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery\table.py in to_arrow(self, progress_bar_type, bqstorage_client, create_bqstorage_client)
   1544             record_batches = []
   1545             for record_batch in self._to_arrow_iterable(
-> 1546                 bqstorage_client=bqstorage_client
   1547             ):
   1548                 record_batches.append(record_batch)

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery\table.py in _to_page_iterable(self, bqstorage_download, tabledata_list_download, bqstorage_client)
   1433     ):
   1434         if bqstorage_client is not None:
-> 1435             for item in bqstorage_download():
   1436                 yield item
   1437             return

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery\_pandas_helpers.py in _download_table_bqstorage(project_id, table, bqstorage_client, preserve_order, selected_fields, page_to_item)
    723                     # Call result() on any finished threads to raise any
    724                     # exceptions encountered.
--> 725                     future.result()
    726 
    727                 try:

~\AppData\Local\Continuum\anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
    426                 raise CancelledError()
    427             elif self._state == FINISHED:
--> 428                 return self.__get_result()
    429 
    430             self._condition.wait(timeout)

~\AppData\Local\Continuum\anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
    382     def __get_result(self):
    383         if self._exception:
--> 384             raise self._exception
    385         else:
    386             return self._result

~\AppData\Local\Continuum\anaconda3\lib\concurrent\futures\thread.py in run(self)
     55 
     56         try:
---> 57             result = self.fn(*self.args, **self.kwargs)
     58         except BaseException as exc:
     59             self.future.set_exception(exc)

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery\_pandas_helpers.py in _download_table_bqstorage_stream(download_state, bqstorage_client, session, stream, worker_queue, page_to_item)
    591         rowstream = bqstorage_client.read_rows(position).rows(session)
    592     else:
--> 593         rowstream = bqstorage_client.read_rows(stream.name).rows(session)
    594 
    595     for page in rowstream.pages:

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery_storage_v1\client.py in read_rows(self, name, offset, retry, timeout, metadata)
    120             retry=retry,
    121             timeout=timeout,
--> 122             metadata=metadata,
    123         )
    124         return reader.ReadRowsStream(

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\cloud\bigquery_storage_v1\gapic\big_query_read_client.py in read_rows(self, read_stream, offset, retry, timeout, metadata)
    370 
    371         return self._inner_api_calls["read_rows"](
--> 372             request, retry=retry, timeout=timeout, metadata=metadata
    373         )
    374 

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\api_core\gapic_v1\method.py in __call__(self, *args, **kwargs)
    143             kwargs["metadata"] = metadata
    144 
--> 145         return wrapped_func(*args, **kwargs)
    146 
    147 

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\api_core\retry.py in retry_wrapped_func(*args, **kwargs)
    284                 sleep_generator,
    285                 self._deadline,
--> 286                 on_error=on_error,
    287             )
    288 

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\api_core\retry.py in retry_target(target, predicate, sleep_generator, deadline, on_error)
    182     for sleep in sleep_generator:
    183         try:
--> 184             return target()
    185 
    186         # pylint: disable=broad-except

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\api_core\timeout.py in func_with_timeout(*args, **kwargs)
    212             """Wrapped function that adds timeout."""
    213             kwargs["timeout"] = next(timeouts)
--> 214             return func(*args, **kwargs)
    215 
    216         return func_with_timeout

~\AppData\Local\Continuum\anaconda3\lib\site-packages\google\api_core\grpc_helpers.py in error_remapped_callable(*args, **kwargs)
    150             return _StreamingResponseIterator(result, prefetch_first_result=prefetch_first)
    151         except grpc.RpcError as exc:
--> 152             six.raise_from(exceptions.from_grpc_error(exc), exc)
    153 
    154     return error_remapped_callable

~\AppData\Local\Continuum\anaconda3\lib\site-packages\six.py in raise_from(value, from_value)

DeadlineExceeded: 504 Deadline Exceeded

Let me know if you need to know more. Thanks in advance.

Rutger


Solution

  • It turned out to be a conflict between a Conda package and a pip package.

    I resolved it by reinstalling all the packages.