I am writing a new cloud function and am using the new Google Cloud Logging library as announced at https://cloud.google.com/blog/products/devops-sre/google-cloud-logging-python-client-library-v3-0-0-release.
I am also using functions-framework to debug my code locally before pushing it to GCP. Setup and Invoke Cloud Functions using Python has been particularly useful here.
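For reference, I'm starting the function locally with the functions-framework CLI, along the lines of functions-framework --target=main --debug, and watching the terminal output from my IDE.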
The problem I have is that when using these two things together I cannot see logging output in my IDE; I can only see print statements. Here's a sample of my code:
from flask import Request
from google.cloud import bigquery
from datetime import datetime
import google.cloud.logging
import logging

log_client = google.cloud.logging.Client()
log_client.setup_logging()


def main(request) -> str:
    #
    # do stuff to set up a BigQuery job (export_script, the query text, is built here)
    #
    bq_client = bigquery.Client()
    job_config = bigquery.QueryJobConfig(labels={"key": "value"})
    nowstr = datetime.now().strftime("%Y%m%d%H%M%S%f")
    job_id = f"qwerty-{nowstr}"
    query_job = bq_client.query(
        query=export_script, job_config=job_config, job_id=job_id
    )
    print("Started job: {}".format(query_job.job_id))
    query_job.result()  # Waits for job to complete.
    logging.info(f"job_id={query_job.job_id}")
    logging.info(f"total_bytes_billed={query_job.total_bytes_billed}")
    return f"{query_job.job_id} {query_job.state} {query_job.error_result}"
However, when I run the function locally with functions-framework, the only output I see in my terminal is
Started job: qwerty-20220306181905424093
As you can see, the call to print(...) has written to my terminal, but the call to logging.info(...) has not. Is there a way to redirect logging output to my terminal when running locally with functions-framework, without affecting logging when the function runs as an actual Cloud Function in GCP?
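To make the question concrete, this is roughly the kind of conditional setup I'm imagining. It's only a sketch I haven't tried yet, and it assumes there is some environment variable (I've used K_SERVICE here purely as an example) that is present in the hosted Cloud Functions runtime but not in a local functions-framework run:

import logging
import os

import google.cloud.logging

if os.environ.get("K_SERVICE"):
    # Assumed to mean we're deployed to GCP: route log records to Cloud Logging.
    google.cloud.logging.Client().setup_logging()
else:
    # Local run under functions-framework: send log records to the terminal.
    logging.basicConfig(level=logging.INFO)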
Thanks to the advice from @cryptofool, I figured out that I needed to change the default logging level to get output to appear in the terminal: the root logger defaults to WARNING, so logging.info(...) records are discarded unless the level is lowered.
from flask import Request
from google.cloud import bigquery
from datetime import datetime
import google.cloud.logging
import logging

logger = logging.getLogger()
logger.setLevel(logging.INFO)


def main(request) -> str:
    #
    # do stuff to setup a bigquery job
    #
    bq_client = bigquery.Client()
    job_config = bigquery.QueryJobConfig(labels={"key": "value"})
    nowstr = datetime.now().strftime("%Y%m%d%H%M%S%f")
    job_id = f"qwerty-{nowstr}"
    query_job = bq_client.query(
        query=export_script, job_config=job_config, job_id=job_id
    )
    print("Started job: {}".format(query_job.job_id))
    query_job.result()  # Waits for job to complete.
    logging.info(f"job_id={query_job.job_id}")
    logging.info(f"total_bytes_billed={query_job.total_bytes_billed}")
    return f"{query_job.job_id} {query_job.state} {query_job.error_result}"
Running the function locally now produces logging output in the terminal:

Started job: qwerty-20220306211233889260
INFO:root:job_id=qwerty-20220306211233889260
INFO:root:total_bytes_billed=31457280
However, I still can't see any output in the terminal when google.cloud.logging is set up:
from flask import Request
from google.cloud import bigquery
from datetime import datetime
import google.cloud.logging
import logging

logger = logging.getLogger()
logger.setLevel(logging.INFO)

log_client = google.cloud.logging.Client()
log_client.setup_logging()


def main(request) -> str:
    #
    # do stuff to setup a bigquery job
    #
    bq_client = bigquery.Client()
    job_config = bigquery.QueryJobConfig(labels={"key": "value"})
    nowstr = datetime.now().strftime("%Y%m%d%H%M%S%f")
    job_id = f"qwerty-{nowstr}"
    query_job = bq_client.query(
        query=export_script, job_config=job_config, job_id=job_id
    )
    print("Started job: {}".format(query_job.job_id))
    query_job.result()  # Waits for job to complete.
    logging.info(f"job_id={query_job.job_id}")
    logging.info(f"total_bytes_billed={query_job.total_bytes_billed}")
    return f"{query_job.job_id} {query_job.state} {query_job.error_result}"
With setup_logging() back in place, the only terminal output is once again:

Started job: qwerty-20220306211718088936
I think I'll start another thread about this.
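In the meantime, here's a quick check I plan to run to see what setup_logging() actually attaches to the root logger, and at what level, since that should show where the INFO records are going. Again just a sketch, and it assumes my local application default credentials are enough for google.cloud.logging.Client() to initialise:

import logging

import google.cloud.logging

google.cloud.logging.Client().setup_logging()

# Inspect the root logger after setup_logging() has run.
root = logging.getLogger()
print("root level:", logging.getLevelName(root.level))
for handler in root.handlers:
    print(type(handler).__name__, "level:", logging.getLevelName(handler.level))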