```python
def create_template_request(self, job_name, template_path,
                            parameters, environment, update_options):
    # Builds (but does not execute) a templates.launch request against
    # the Dataflow v1b3 REST API.
    request = self.dataflow.projects().templates().launch(
        projectId=self.project_id,
        gcsPath=template_path,
        body={
            "jobName": job_name,
            "parameters": parameters,
            "environment": environment,
            "update": update_options
        }
    )
    return request

def deploy_dynamic_destinations_datatransfer(self, active_jobs):
    job_name = 'dynamic_destinations_datatransfer'
    template_name = 'PubSubToBigQueryDynamicDestinationsTemplate'
    template_path = "gs://{}/templates/{}".format(self.project_id, template_name)
    input_subscription = 'message_hub'
    output_default_table = 'streaming_datatransfer.streaming_dynamic_changetracktransfer'
    # Template parameters: read from the Pub/Sub subscription and write to
    # the default BigQuery table, autoscaling on throughput.
    parameters = {
        "inputSubscription": "projects/{}/subscriptions/{}".format(
            self.project_id, input_subscription),
        "outputTableSpec": "{}:{}".format(self.project_id, output_default_table),
        "autoscalingAlgorithm": "THROUGHPUT_BASED"
    }
    # RuntimeEnvironment settings for the launched job.
    environment = {
        "zone": 'us-central1-a',
        "machineType": 'n2-standard-2',
        "maxWorkers": 5
    }
    # The launch body's "update" field is a boolean: update the running
    # job in place if one with this name is already active, otherwise
    # launch a new job.
    update_options = False
    if 'dynamic_destinations_datatransfer' in active_jobs:
        update_options = True
    request = self.create_template_request(job_name, template_path,
                                           parameters, environment, update_options)
    request.execute()
```

Updating an existing pipeline:
https://cloud.google.com/dataflow/docs/guides/updating-a-pipeline?hl=ja
https://cloud.google.com/dataflow/docs/reference/rest/v1b3/RuntimeEnvironment

Autoscaling based on CPU utilization:
https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline?hl=ja#autotuning-features
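The two methods above reference `self.dataflow`, `self.project_id`, and an `active_jobs` list without showing where they come from. The following is a minimal sketch of one way to supply them, assuming the methods live on a wrapper class; the class name `DataflowDeployer`, the helper `list_active_job_names`, the project ID, and the use of Application Default Credentials are illustrative assumptions, not part of the original.

```python
# Sketch only: DataflowDeployer, list_active_job_names, and the project
# ID are hypothetical names; credentials come from Application Default
# Credentials via the google-api-python-client discovery builder.
from googleapiclient.discovery import build


class DataflowDeployer:
    def __init__(self, project_id):
        self.project_id = project_id
        # Dataflow v1b3 REST client used by create_template_request.
        self.dataflow = build('dataflow', 'v1b3')

    def list_active_job_names(self):
        # Lists currently running jobs so the deploy method can decide
        # whether to launch a new job or update an existing one.
        response = self.dataflow.projects().jobs().list(
            projectId=self.project_id,
            filter='ACTIVE'
        ).execute()
        return [job['name'] for job in response.get('jobs', [])]

    # create_template_request and deploy_dynamic_destinations_datatransfer
    # from the section above would be defined on this class as well.


# Hypothetical usage:
deployer = DataflowDeployer(project_id='my-gcp-project')
active_jobs = deployer.list_active_job_names()
deployer.deploy_dynamic_destinations_datatransfer(active_jobs)
```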