BigQuery Service
The BigQuery service allows you to use the Google BigQuery API in Apps Script. This API gives users the ability to manage their BigQuery projects, upload new data, and execute queries.

This is an advanced service that must be enabled before use.
Reference
For detailed information on this service, see the reference documentation for the BigQuery API. Like all advanced services in Apps Script, the BigQuery service uses the same objects, methods, and parameters as the public API. For more information, see How method signatures are determined.
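For example, the API's datasets.list method, which takes a project ID, is called through the service's Datasets collection. The following is a minimal sketch, assuming the advanced service is already enabled and the placeholder project ID is replaced with your own:

/**
 * Lists the datasets in a project to confirm the advanced service is enabled.
 */
function listDatasets() {
  // Replace this value with your Google Cloud project ID (placeholder).
  const projectId = 'XXXXXXXX';

  // Mirrors the REST method datasets.list(projectId).
  const response = BigQuery.Datasets.list(projectId);
  const datasets = response.datasets || [];
  datasets.forEach(function(dataset) {
    console.log(dataset.datasetReference.datasetId);
  });
}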
To report issues and find other support, see the Google Cloud support guide.
Sample code
The sample code below uses version 2 of the API.
Run query
This sample queries a list of the daily top Google Search terms.
/**
 * Runs a BigQuery query and logs the results in a spreadsheet.
 */
function runQuery() {
  // Replace this value with the project ID listed in the Google
  // Cloud Platform project.
  const projectId = 'XXXXXXXX';

  const request = {
    // TODO (developer) - Replace query with yours
    query: 'SELECT refresh_date AS Day, term AS Top_Term, rank ' +
      'FROM `bigquery-public-data.google_trends.top_terms` ' +
      'WHERE rank = 1 ' +
      'AND refresh_date >= DATE_SUB(CURRENT_DATE(), INTERVAL 2 WEEK) ' +
      'GROUP BY Day, Top_Term, rank ' +
      'ORDER BY Day DESC;',
    useLegacySql: false
  };
  let queryResults = BigQuery.Jobs.query(request, projectId);
  const jobId = queryResults.jobReference.jobId;

  // Check on status of the Query Job.
  let sleepTimeMs = 500;
  while (!queryResults.jobComplete) {
    Utilities.sleep(sleepTimeMs);
    sleepTimeMs *= 2;
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId);
  }

  // Get all the rows of results.
  let rows = queryResults.rows;
  while (queryResults.pageToken) {
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId, {
      pageToken: queryResults.pageToken
    });
    rows = rows.concat(queryResults.rows);
  }

  if (!rows) {
    console.log('No rows returned.');
    return;
  }

  const spreadsheet = SpreadsheetApp.create('BigQuery Results');
  const sheet = spreadsheet.getActiveSheet();

  // Append the headers.
  const headers = queryResults.schema.fields.map(function(field) {
    return field.name;
  });
  sheet.appendRow(headers);

  // Append the results.
  const data = new Array(rows.length);
  for (let i = 0; i < rows.length; i++) {
    const cols = rows[i].f;
    data[i] = new Array(cols.length);
    for (let j = 0; j < cols.length; j++) {
      data[i][j] = cols[j].v;
    }
  }
  sheet.getRange(2, 1, rows.length, headers.length).setValues(data);

  console.log('Results spreadsheet created: %s', spreadsheet.getUrl());
}
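The query request accepts the other fields of the jobs.query REST resource. For instance, standard SQL query parameters can be supplied with parameterMode and queryParameters. The sketch below is only meant to illustrate the request shape; the query and parameter values are placeholders:

/**
 * Runs a parameterized query and logs the first value returned.
 */
function runParameterizedQuery() {
  // Replace this value with your Google Cloud project ID (placeholder).
  const projectId = 'XXXXXXXX';

  const request = {
    query: 'SELECT term FROM `bigquery-public-data.google_trends.top_terms` ' +
      'WHERE rank = @rank LIMIT 1;',
    useLegacySql: false,
    parameterMode: 'NAMED',
    queryParameters: [{
      name: 'rank',
      parameterType: {type: 'INT64'},
      parameterValue: {value: '1'}
    }]
  };
  const queryResults = BigQuery.Jobs.query(request, projectId);
  if (queryResults.jobComplete && queryResults.rows) {
    console.log(queryResults.rows[0].f[0].v);
  } else {
    console.log('Query did not complete synchronously; poll with getQueryResults.');
  }
}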
Load CSV data
This sample creates a new table and loads a CSV file from Google Drive into it.
/**
 * Loads a CSV into BigQuery
 */
function loadCsv() {
  // Replace this value with the project ID listed in the Google
  // Cloud Platform project.
  const projectId = 'XXXXXXXX';
  // Create a dataset in the BigQuery UI (https://bigquery.cloud.google.com)
  // and enter its ID below.
  const datasetId = 'YYYYYYYY';
  // Sample CSV file of Google Trends data conforming to the schema below.
  // https://docs.google.com/file/d/0BwzA1Orbvy5WMXFLaTR1Z1p2UDg/edit
  const csvFileId = '0BwzA1Orbvy5WMXFLaTR1Z1p2UDg';

  // Create the table.
  const tableId = 'pets_' + new Date().getTime();
  let table = {
    tableReference: {
      projectId: projectId,
      datasetId: datasetId,
      tableId: tableId
    },
    schema: {
      fields: [
        {name: 'week', type: 'STRING'},
        {name: 'cat', type: 'INTEGER'},
        {name: 'dog', type: 'INTEGER'},
        {name: 'bird', type: 'INTEGER'}
      ]
    }
  };
  try {
    table = BigQuery.Tables.insert(table, projectId, datasetId);
    console.log('Table created: %s', table.id);
  } catch (err) {
    console.log('unable to create table');
  }

  // Load CSV data from Drive and convert to the correct format for upload.
  const file = DriveApp.getFileById(csvFileId);
  const data = file.getBlob().setContentType('application/octet-stream');

  // Create the data upload job.
  const job = {
    configuration: {
      load: {
        destinationTable: {
          projectId: projectId,
          datasetId: datasetId,
          tableId: tableId
        },
        skipLeadingRows: 1
      }
    }
  };
  try {
    const jobResult = BigQuery.Jobs.insert(job, projectId, data);
    console.log(`Load job started. Status: ${jobResult.status.state}`);
  } catch (err) {
    console.log('unable to insert job');
  }
}
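The load job started above runs asynchronously, so the data may not be queryable as soon as Jobs.insert returns. One way to wait for it is to poll Jobs.get until the job reports DONE; the helper below is a sketch that takes the project ID and a job ID (for example, jobResult.jobReference.jobId from the sample):

/**
 * Polls a BigQuery job until it completes and logs any error.
 */
function waitForJob(projectId, jobId) {
  let sleepTimeMs = 500;
  let job = BigQuery.Jobs.get(projectId, jobId);
  while (job.status.state !== 'DONE') {
    Utilities.sleep(sleepTimeMs);
    sleepTimeMs *= 2;
    job = BigQuery.Jobs.get(projectId, jobId);
  }
  if (job.status.errorResult) {
    console.log('Job failed: %s', job.status.errorResult.message);
  } else {
    console.log('Job completed.');
  }
}

Exponential backoff on the sleep interval keeps the number of polling calls low for long-running jobs, matching the approach used in the query sample above.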