Decorating a Python function to serve as a Google Cloud Function

I wrote this for a class project, the backend of a dog-voting site. I noticed that several of the functions I was deploying as Cloud Functions shared duplicated code: they were all wrapped in try / except blocks that returned either 200 and JSON, or 500 and a traceback. (I understand that it would be better to return a structured, informative error body, but this made debugging easier, and we had no comprehensive front-end error handling anyway — a trade-off I was willing to accept.)

The decorator gives each function a connection from a shared pool, uses jsonschema to validate inputs and outputs, responds to CORS OPTIONS requests from anywhere, and uses print statements for logging. That's not great, but it was much easier to set up than the Google Cloud Functions logging library, and everything printed to stdout while the function runs gets logged in GCP Stackdriver.

Here is the decorator itself:

import tools
import json
import tracing

import jsonschema
from util.get_pool import get_pool

pg_pool = None


def cloudfunction (in_schema = None, out_schema = None):
"" "

: param in_schema: the schema for entry or a falsity value if there is no entry
: param out_schema: the schema for the output, or a falsity value if there is no output
: return: the function encapsulated in the cloud function
"" "
# Both schemas must be valid in accordance with Project 7 of Jsonschema, if they are provided.
if in_schema:
jsonschema.Draft7Validator.check_schema (in_schema)
if out_schema:
jsonschema.Draft7Validator.check_schema (out_schema)

def cloudfunction_decorator (f):
"" "Envelops a function with two arguments, the first of which is a json object that should be sent with the
request, and the second is a postgresql pool. He modifies it by:
- definition of CORS headers and response to OPTIONS requests with `Allow-Origin *`
- pass a connection from a postgres global connection pool
- adding logging, all entries as well as error traces.

: param f: A function that takes a `request` and a` pgpool` and returns a json-serializable object
: return: a function that accepts an argument, a Flask query, and calls f with the listed changes
"" "

@ functools.wraps (f)
def wrapped (request):
pg_pool global

if request.method == & OPTIONS & # 39 ;:
return cors_options ()

# If this is not a CORS OPTIONS request, always include the basic header.
headers = {-Access-Control-Allow-Origin: & # 39; * & # 39;

otherwise pg_pool:
pg_pool = get_pool ()

try:
conn = pg_pool.getconn ()

if in_schema:
request_json = request.get_json ()
print (repr ({"request_json": request_json}))
jsonschema.validate (request_json, in_schema)
function_output = f (request_json, conn)
other:
function_output = f (conn)

if out_schema:
jsonschema.validate (function_output, out_schema)

conn.commit ()
print (repr ({"response_json": function_output}))

response_json = json.dumps (function_output)
# TODO allows functions to specify return codes in non-unique cases
return (response_json, 200, headers)
except:
print ("Error: traceback exception:" + repr (traceback.format_exc ()))
return (traceback.format_exc (), 500, headers)
finally:
# Make sure to restore the connection in the pool, even if there was an exception.
try:
pg_pool.putconn (conn)
except NameError: # conn may not be set, depending on the case where the error occurs above
pass

return packed

return cloudfunction_decorator


# If an OPTIONS request is given, tell the requester that we allow all CORS requests (pre-flight phase)
def cors_options ():
# Allow GET and POST requests of any origin with the content type.
# header and cache the preflight response for a 3600
headers = {
& # 39; Access-Control-Allow-Origin: & # 39;
& # 39; Access-Control-Allow-Headers & # 39; Content Type & # 39;
& # 39; Access-Control-Max-Age & # 39; 3600 & # 39;
}

return (& # 39 ;, 204, headers)

And here is get_pool:

importenv
from psycopg2 import OperationalError, sign in
from psycopg2.pool import SimpleConnectionPool

INSTANCE_CONNECTION_NAME = getenv (& # 39; INSTANCE_CONNECTION_NAME & # 39 ;, "")

POSTGRES_USER = getenv (& # 39; POSTGRES_USER & # 39 ;, "")
POSTGRES_PASSWORD = getenv ('POSTGRES_PASSWORD', '' ')
POSTGRES_NAME = getenv ('POSTGRES_DATABASE', 'postgres')

pg_config = {
& # 39; user & # 39 ;: POSTGRES_USER,
& # 39; password & # 39; POSTGRES_PASSWORD,
& # 39; basename & # 39 ;: POSTGRES_NAME
}


def get_pool ():
try:
return __connect (f / cloudsql / {INSTANCE_CONNECTION_NAME} & # 39;)
except OperationalError:
# If the production parameters fail, use those of the local development
return __connect (& # 39; localhost & # 39;)


def __connect (host):
"" "
Support functions to connect to Postgres
"" "
pg_config['host'] = host
return SimpleConnectionPool (1, 1, ** pg_config)


def get_connection (host = "localhost"):
pg_config["host"] = host
return connect (** pg_config)

And an example of use:





@cloudfunction (
in_schema = {"type": "string"},
out_schema = {
"any": [{
            "type": "object",
            "properties": {
                "dog1": {"type": "integer"},
                "dog2": {"type": "integer"}
            },
            "additionalProperties": False,
            "minProperties": 2,
        }, {
            "type": "null"
        }]
    })
def get_dog_pair (request_json, conn):
    [function body elided]