@@ -87,6 +87,30 @@ def list_table_data project_id:, dataset_id:, table_id:
8787 # [END list_table_data]
8888end
8989
# Stream rows of data into a BigQuery table using the streaming insert API.
#
# project_id - String ID of the Google Cloud project.
# dataset_id - String ID of the dataset containing the table.
# table_id   - String ID of the table to import data into.
# row_data   - Array of Hashes, one Hash per row, mapping column names to values.
#
# Prints a success message, or the count of rows that failed to insert.
def import_table_data project_id:, dataset_id:, table_id:, row_data:
  # [START import_table_data]
  # project_id = "Your Google Cloud project ID"
  # dataset_id = "ID of the dataset containing table"
  # table_id = "ID of the table to import data into"
  # row_data = [{ column1: value, column2: value }, ...]

  require "google/cloud"

  gcloud   = Google::Cloud.new project_id
  bigquery = gcloud.bigquery
  dataset  = bigquery.dataset dataset_id
  table    = dataset.table table_id

  # table.insert streams the rows; the response reports per-row failures
  # rather than raising, so inspect it explicitly.
  response = table.insert row_data

  if response.success?
    puts "Inserted rows successfully"
  else
    puts "Failed to insert #{response.error_rows.count} rows"
  end
  # [END import_table_data]
end
113+
90114def import_table_data_from_file project_id :, dataset_id :, table_id :,
91115 local_file_path :
92116 # [START import_table_data_from_file]
@@ -117,7 +141,7 @@ def import_table_data_from_cloud_storage project_id:, dataset_id:, table_id:,
117141 # [START import_table_data_from_cloud_storage]
118142 # project_id = "Your Google Cloud project ID"
119143 # dataset_id = "ID of the dataset containing table"
120- # table_id = "ID of the table to import file data into"
144+ # table_id = "ID of the table to import data into"
121145 # storage_path = "Storage path to file to import, eg. gs://bucket/file.csv"
122146
123147 require "google/cloud"
@@ -204,6 +228,8 @@ def run_query_as_job project_id:, query_string:
204228end
205229
206230if __FILE__ == $PROGRAM_NAME
231+ require "json"
232+
207233 project_id = ENV [ "GOOGLE_CLOUD_PROJECT" ]
208234 command = ARGV . shift
209235
@@ -233,6 +259,11 @@ def run_query_as_job project_id:, query_string:
233259 dataset_id : ARGV . shift ,
234260 table_id : ARGV . shift ,
235261 storage_path : ARGV . shift
262+ when "import_data"
263+ import_table_data project_id : project_id ,
264+ dataset_id : ARGV . shift ,
265+ table_id : ARGV . shift ,
266+ row_data : JSON . parse ( ARGV . shift )
236267 when "export"
237268 export_table_data_to_cloud_storage project_id : project_id ,
238269 dataset_id : ARGV . shift ,
@@ -253,6 +284,7 @@ def run_query_as_job project_id:, query_string:
253284 list_data <dataset_id> <table_id> List data in table with the specified ID
254285 import_file <dataset_id> <table_id> <file_path>
255286 import_gcs <dataset_id> <table_id> <cloud_storage_path>
287+ import_data <dataset_id> <table_id> "[{ <json row data> }]"
256288 export <dataset_id> <table_id> <cloud_storage_path>
257289 query <query>
258290 query_job <query>
0 commit comments