Cloud Function to load alerts to BigQuery tables

To run only the full end-to-end test, skip ahead to "Test the end result" below.

Test individual pieces of the cloud function

# Identify the test deployment: GCP project, survey, and broker test id.
export GCP_PROJECT="${GOOGLE_CLOUD_PROJECT}"
export SURVEY="ztf"
export TESTID="storebq"
# cd /Users/troyraen/Documents/broker/storebq/broker/cloud_functions/store_BigQuery  # author's local checkout of the function source
import troy_fncs as troy
from broker_utils import data_utils, gcp_utils, schema_maps
import main

import os

# SURVEY and TESTID are exported in the shell setup above; bare names were
# previously undefined in Python, so read them from the environment here.
survey = os.environ['SURVEY']
testid = os.environ['TESTID']

schema_map = schema_maps.load_schema_map(survey, testid)
kwargs = {'drop_cutouts': True, 'schema_map': schema_map}
alert_dict = troy.load_alert_file(kwargs)

# table_id was previously undefined; construct the fully-qualified
# "<project>.<dataset>.<table>" id explicitly.
# NOTE(review): dataset/table naming is assumed — verify against the broker's
# BigQuery setup before running.
table_id = f"{os.environ['GCP_PROJECT']}.{survey}_alerts_{testid}.alerts"
gcp_utils.insert_rows_bigquery(table_id, [alert_dict])

Set up, run, stop, and delete the testing instance

Create/delete a broker testing instance

# get the code; bail out if clone or cd fails so later commands don't run
# in the wrong directory
git clone https://github.com/mwvgroup/Pitt-Google-Broker
cd Pitt-Google-Broker || exit 1
git checkout tjr/store_bigquery
cd broker/setup_broker || exit 1

# create/delete the instance
survey="ztf"
testid="storebq"
teardown="False"
# teardown="True"
./setup_broker.sh "$testid" "$teardown" "$survey"

# downsize the night-conductor VM for testing
nconductVM="${survey}-night-conductor-${testid}"
# quote the VM name: unquoted $nconductVM is subject to word splitting
gcloud compute instances set-machine-type "$nconductVM" --machine-type g1-small

Push some alerts through

from broker_utils import consumer_sim

# Identify the broker instance under test as a (survey, testid) tuple.
survey = 'ztf'
testid = 'storebq'
instance = (survey, testid)

# Publish a single batch of 100 alerts to the instance.
alert_rate = (100, 'once')
consumer_sim.publish_stream(alert_rate, instance)

# Alternative: publish at the average active-ZTF rate for a fixed runtime.
# alert_rate = 'ztf-active-avg'
# runtime = (10, 'min')  # options: 'sec', 'min', 'hr', 'night'(=10 hrs)
# consumer_sim.publish_stream(alert_rate, instance, runtime)

Stop the broker, which triggers night conductor to shut everything down and process the streams.

# Publish the END cue to night conductor's cue topic; night conductor then
# shuts the instance down and processes the streams.
topic="${survey}-cue_night_conductor-${testid}"
cue="END"
gcloud pubsub topics publish "${topic}" --message="${cue}"