I am using Google Apps Script to automate importing data from BigQuery into a spreadsheet, but the script fails with the following BigQuery response:
Error 413: Message: response too large
Here is a sample of my code; the error occurs on line 5:
1 var projectId = projectid;
2 var request = {
3 query: 'My Query'
4 };
5 var queryResults = BigQuery.Jobs.query(request, projectId);
What are some possible solutions? Are there any that would not increase my BigQuery costs?
PS: The result set is roughly 16 MB of data, about 300,000 rows.
The query:
SELECT
  ORDER.addedon AS date,
  ORDER.display_order_id AS order_id,
  OrderSkuDetails.pid AS pid,
  OrderSkuDetails.price AS price,
  OrderSkuDetails.saleprice AS saleprice,
  OrderSkuDetails.subtotal AS subtotal,
  OrderSkuDetails.shippingcharge AS shippingcharge,
  OrderSkuDetails.codcharge AS codcharge,
  User.email AS email,
  ORDER.order_id AS payment_id,
  ORDER.payment_mode AS payment_mode,
  ORDER.source AS source,
  ORDER.user_id AS user_id,
  Payments.payment_status AS payment_status,
  User.profileJson.text,
  OrderStatus.sub_status_id AS sub_status_id,
  NProduct.featured AS featured
FROM
  FLATTEN([Mixpanel_Import.Order], payment_mode) AS ORDER
INNER JOIN
  [Mixpanel_Import.OrderSkuDetails] AS OrderSkuDetails
ON
  ORDER.order_id = OrderSkuDetails.order_id
INNER JOIN
  [Mixpanel_Import.OrderStatus] AS OrderStatus
ON
  ORDER.order_id = OrderStatus.order_id
INNER JOIN
  [Mixpanel_Import.User] AS User
ON
  ORDER.user_id = User.__key__.id
INNER JOIN
  [Mixpanel_Import.Payments] AS Payments
ON
  ORDER.order_id = Payments.order_id
INNER JOIN
  [Mixpanel_Import.NProduct] AS NProduct
ON
  OrderSkuDetails.pid = NProduct.pid
Below is the full function:
function bigQuery(tableName) {
  var projectId = 'project';
  var request = {
    query: 'The Query' // the SQL shown above
  };
  var queryResults = BigQuery.Jobs.query(request, projectId); // Error 413 is thrown on this line

  var jobId = queryResults.jobReference.jobId;
  Logger.log(jobId);

  // Poll until the job completes.
  var sleepTimeMs = 500;
  while (!queryResults.jobComplete) {
    Utilities.sleep(sleepTimeMs);
    sleepTimeMs *= 2;
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId);
  }

  // Fetch the remaining pages and collect all rows.
  var rows = queryResults.rows;
  while (queryResults.pageToken) {
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId, {pageToken: queryResults.pageToken});
    rows = rows.concat(queryResults.rows);
  }
  queryResults.rows = rows;
  return queryResults;
}
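For reference, here is a minimal sketch of the kind of paged fetch I am aiming for, assuming the standard BigQuery advanced service is enabled. The function name writeQueryToSheet, the sheetName parameter, and the pageSize of 10,000 are illustrative assumptions, not tested values; the idea is that capping each page with maxResults keeps any single response small, and each page is written to the sheet as it arrives. Since the paging only reads results of the already-completed job, it should not add query cost.

function writeQueryToSheet(projectId, sql, sheetName) {
  var pageSize = 10000; // assumed page size; tune to stay under the response-size limit
  var queryResults = BigQuery.Jobs.query({query: sql, maxResults: pageSize}, projectId);
  var jobId = queryResults.jobReference.jobId;

  // Poll until the job finishes.
  var sleepTimeMs = 500;
  while (!queryResults.jobComplete) {
    Utilities.sleep(sleepTimeMs);
    sleepTimeMs *= 2;
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId, {maxResults: pageSize});
  }

  var sheet = SpreadsheetApp.getActive().getSheetByName(sheetName);
  sheet.clearContents();

  // Header row taken from the result schema.
  var headers = queryResults.schema.fields.map(function(f) { return f.name; });
  sheet.appendRow(headers);

  // Write each page of rows, then fetch the next page via pageToken.
  while (true) {
    var rows = (queryResults.rows || []).map(function(r) {
      return r.f.map(function(cell) { return cell.v; });
    });
    if (rows.length > 0) {
      sheet.getRange(sheet.getLastRow() + 1, 1, rows.length, rows[0].length).setValues(rows);
    }
    if (!queryResults.pageToken) break;
    queryResults = BigQuery.Jobs.getQueryResults(projectId, jobId, {
      pageToken: queryResults.pageToken,
      maxResults: pageSize
    });
  }
}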
You probably shouldn't be trying to pull 300K rows over the wire and display them in a solution. I assume you're building some kind of web front end?
@polleyg It's basically for data analysis. I will need to pull this much data; is there any way to do it?
Even if I limit the result to 10,000, the error stays the same.