This is a utility to replicate the Cloud Functions production environment in a local development environment, using Python and Docker.
This directory contains additional information about the published artifacts of the gcp-cloud-functions-python image
The tag name corresponds to the version of Cloud Functions Python Runtime.
- 3.12 -> Python 3.12
- 3.11 -> Python 3.11
- 3.10 -> Python 3.10
- 3.9 -> Python 3.9
- 3.8 -> Python 3.8
- 3.7 -> Python 3.7
-
Generate GCloud Credentials JSON:
gcloud auth application-default login
NOTE: This generates the
application_default_credentials.json
file, which will be used to run the development environment.
- NOTE: This file is required.
# Poetry project definition — replace the placeholder metadata with your own.
[tool.poetry]
name = "myproject"
version = "0.1.0"
description = "Your project description."
authors = ["Your Name <[email protected]>"]
readme = "README.md"
# Runtime dependencies. The Python constraint (~3.10) should match the tag of
# the gcp-cloud-functions-python base image used in the Dockerfile.
[tool.poetry.dependencies]
python = "~3.10"
python-dotenv = "~1.0"
# Development-only dependencies: functions-framework emulates the Cloud
# Functions HTTP runtime locally (see the `docker compose exec` command below).
[tool.poetry.group.dev.dependencies]
functions-framework = "~3.5"
# Standard Poetry build backend configuration.
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
- NOTE: This file is required.
from json import dumps as json_dumps
from os import environ as os_environ
from dotenv import load_dotenv
from functions_framework import http as functions_framework_http
# Load variables from a local .env file into the process environment so the
# os_environ lookups below can resolve them during development.
load_dotenv()

# BigQuery target configuration, supplied via environment variables.
GCP_PROJECT_ID = os_environ.get('GCP_PROJECT_ID')
BIGQUERY_DATASET = os_environ.get('BIGQUERY_DATASET')
# Fixed: previously read 'BIGQUERY_DATASET' again (copy-paste error), so the
# table name silently mirrored the dataset name.
BIGQUERY_TABLE = os_environ.get('BIGQUERY_TABLE')


@functions_framework_http
def load_data_to_bigquery(request):
    """HTTP Cloud Function entry point (template placeholder).

    Args:
        request: The incoming HTTP request object provided by the
            Functions Framework.

    Returns:
        str: A JSON-encoded body; currently always '{"success": true}'.
    """
    # NOTE: HERE YOUR CODE.
    response_data = {'success': True}
    return json_dumps(response_data)
# Base image replicating the Cloud Functions Python runtime; the tag (3.10)
# tracks the runtime's Python version and should match pyproject's constraint.
FROM hugofer93/gcp-cloud-functions-python:3.10
# Absolute working directory; the compose service bind-mounts the project here.
WORKDIR /opt/project
# Copy the project into the image. In development the ".:/opt/project" volume
# in docker-compose shadows this copy with the live source tree.
COPY . .
services:
  project:
    container_name: "your-container-name"
    build: .
    environment:
      - DEBUG=true
      # Must match the container-side path of the credentials mount below so
      # Google client libraries find the Application Default Credentials.
      - GOOGLE_APPLICATION_CREDENTIALS=/root/.gcloud/credentials.json
    volumes:
      # Live-mount the project source over the image's COPY'd files.
      - ".:/opt/project"
      # Read-only mount of the local ADC file generated by
      # `gcloud auth application-default login`.
      - "$HOME/.config/gcloud/application_default_credentials.json:/root/.gcloud/credentials.json:ro"
    ports:
      # functions-framework listens on 8080 by default.
      - "8080:8080"
    command: >
      sh -c "poetry install --no-root && tail -f /dev/null"
    restart: "unless-stopped"
- NOTE: The following example command is based on the previous samples.
docker compose exec project poetry run functions-framework --source=main.py --target=load_data_to_bigquery
- NOTE: Poetry can export the project's dependencies to a
requirements.txt
file.
docker compose exec project poetry export --without-hashes -f requirements.txt --output requirements.txt