admin管理员组

文章数量:1323714

In my Cloud Code, I would like to update all of my records (around 50k) with new data. But I noticed that my job fails even though I follow the 1000-record limit. I get a "success/error was not called" error for this job. Any idea how I can resolve this?

// Background job: stamp every Installation record with a `newCulumn` date.
// Strategy: page through all Installations in objectId order (1000 per query),
// accumulate them in `results`, then save them back in batches of `limit`.
Parse.Cloud.job("hello", function(request, response) {
Parse.Cloud.useMasterKey();
var results = [];      // objects fetched but not yet saved
var limit = 1000;      // Parse's max page size per query / batch size per save

// Save one batch of modified objects. On success, either finish the job
// (nothing left in `results`) or continue with the next batch.
var saveUpdatedQueries = function(queries) {
    console.log("updating records " + queries.length);

    Parse.Object.saveAll(queries, {
        success: function(lists) {
            console.log("lists ok " + lists.length);

            if (!results.length) {
                response.success("finished");
                return;
            }

            updatingRecords(lists.length);
        },
        error: function(reason) {
            console.log("error");
            // BUG FIX: the original handler only logged here and never
            // settled the job, so any save failure left the job hanging
            // until Parse killed it with "success/error was not called".
            // Report the failure so the job terminates deterministically.
            response.error("saveAll failed, remaining results " + results.length +
                ", error:" + reason.code + " " + reason.message);
        }
    });
}

// Drop the `skip` objects that were just saved, then build and save the
// next batch of at most `limit` objects from the front of `results`.
var updatingRecords = function(skip) {
    var tempRecords = [];

    if (skip) {
        results = results.slice(skip);
    }

    console.log("skip: " + skip + " Results length: " + results.length);

    for (var i = 0; i < results.length; i++) {
        var today = new Date();
        var newObject = results[i];
        newObject.set('newCulumn', today);
        tempRecords.push(newObject);

        // Stop at the end of `results` or once the batch is full.
        if (i === results.length - 1 || tempRecords.length === limit) {
            break;
        }
    }

    saveUpdatedQueries(tempRecords);
}

// Accumulate a fetched page; fetch the next page if this one was full,
// otherwise start saving.
var processCallback = function(res) {
    results = results.concat(res);
    if (res.length === limit) {
        // A full page means there may be more — continue after the last id.
        process(res[res.length - 1].id);
        return;
    }

    updatingRecords(0);
}

// Fetch one page of Installations with objectId greater than `skip`
// (keyset pagination — avoids the skip-offset limit on large classes).
var process = function(skip) {
    var query = new Parse.Query(Parse.Installation);

    if (skip) {
        query.greaterThan("objectId", skip);
    }

    query.limit(limit);
    query.ascending("objectId");
    query.find().then(function querySuccess(res) {
        processCallback(res);

    }, function queryFailed(reason) {
        if (reason.code == 155 || reason.code == 141) { // exceeded parse timeout
            console.log("time out error");
            // Transient timeout: retry the same page.
            process(skip);
        } else {
            response.error("query unsuccessful, length of result " + results.length + ", error:" + reason.code + " " + reason.message);
        }
    });
}

process(false);

});

In my Cloud Code, I would like to update all of my records (around 50k) with new data. But I noticed that my job fails even though I follow the 1000-record limit. I get a "success/error was not called" error for this job. Any idea how I can resolve this?

// Background job that stamps a `newCulumn` date onto every Installation.
// Flow: fetchPage -> onPage (accumulate) -> flushBuffered (build batch)
// -> persistBatch (saveAll) -> flushBuffered ... until the buffer drains.
Parse.Cloud.job("hello", function(request, response) {
Parse.Cloud.useMasterKey();
var buffered = [];     // fetched objects awaiting save
var pageSize = 1000;   // query page size and save batch size

// Fetch one page of Installations, ordered by objectId, starting
// strictly after `afterId` when one is given (keyset pagination).
var fetchPage = function(afterId) {
    var pageQuery = new Parse.Query(Parse.Installation);

    if (afterId) {
        pageQuery.greaterThan("objectId", afterId);
    }

    pageQuery.limit(pageSize);
    pageQuery.ascending("objectId");

    var handleFailure = function(reason) {
        // 155/141: the request exceeded Parse's timeout — retry same page.
        if (reason.code == 155 || reason.code == 141) {
            console.log("time out error");
            fetchPage(afterId);
            return;
        }
        response.error("query unsuccessful, length of result " + buffered.length + ", error:" + reason.code + " " + reason.message);
    };

    pageQuery.find().then(function (page) {
        onPage(page);
    }, handleFailure);
}

// Append the page to the buffer; keep paging while pages come back full,
// otherwise begin flushing the buffer to the server.
var onPage = function(page) {
    buffered = buffered.concat(page);

    if (page.length !== pageSize) {
        flushBuffered(0);
    } else {
        fetchPage(page[page.length - 1].id);
    }
}

// Discard the first `consumed` objects (already saved), stamp the next
// batch of at most `pageSize` objects, and hand the batch to persistBatch.
var flushBuffered = function(consumed) {
    if (consumed) {
        buffered = buffered.slice(consumed);
    }

    console.log("skip: " + consumed + " Results length: " + buffered.length);

    var batch = [];
    var idx = 0;
    while (idx < buffered.length && batch.length < pageSize) {
        var record = buffered[idx];
        record.set('newCulumn', new Date());
        batch.push(record);
        idx++;
    }

    persistBatch(batch);
}

// saveAll one batch; on success either finish the job or continue
// flushing whatever is still buffered.
var persistBatch = function(batch) {
    console.log("updating records " + batch.length);

    Parse.Object.saveAll(batch, {
        success: function(saved) {
            console.log("lists ok " + saved.length);

            if (!buffered.length) {
                response.success("finished");
            } else {
                flushBuffered(saved.length);
            }
        },
        error: function(reason) {
            console.log("error");
        }
    });
}

fetchPage(false);

});
Share Improve this question edited Dec 20, 2015 at 5:49 Soheil asked Dec 15, 2015 at 11:47 SoheilSoheil 5,3643 gold badges25 silver badges43 bronze badges 11
  • 1 Why are you not using Promises ? – Mo Nazemi Commented Dec 15, 2015 at 16:33
  • @MoNazemi I tried with saveAll promises, but I still get the same result – Soheil Commented Dec 16, 2015 at 2:08
  • How does it fail? Does it time out? A job will be cut after 15 seconds... – Marius Waldal Commented Dec 20, 2015 at 13:48
  • 3 You will hit the free plan limit if you make more than 1800 requests per minute. – Mo Nazemi Commented Dec 20, 2015 at 20:09
  • 2 It's a good practice to embed retention (e.g. 1/3/7 days of inactivity) pushes inside your application, rather than using Parse. Your app has exact knowledge when was the last time it was launched, and you avoid situation where you send a notification to a user who has launched your app between process time and push receive time. For android you can use Alarm Manager. iOS and Windows should support something like this too. – Zibi Commented Dec 24, 2015 at 11:05
 |  Show 6 more comments

2 Answers 2

Reset to default 1

Basically, in this cloud architecture the request timeout is around 60 seconds, but you are trying to insert thousands of records in one transaction, which takes more than 60 seconds — that's why your request always fails.

There's better ways to insert bigger amount of records,

  1. Task Queues
  2. Cron or scheduled task

I think a task queue is the better fit for your problem. Watch this video to get a good overview of task queues:

Task queue & cron jobs

Workaround: You could schedule a cron job in batches of an acceptably low number of records, limited by the hosting services limit you have. For example, if you can only process 10 requests every minute, you would first request all the IDs that need to be updated, then split them into chunks that the server will accept and process within the time limit. It's just a workaround.

Long-Term: A better solution would be to design your app to request as little data as possible from the server, rather than forcing the server to do all the heavy lifting. This also allows your business logic to be exposed through a convenient public API, rather than sitting as a hidden process on your server.

本文标签: javascript, How to save a batch of data in Parse Cloud Code, Stack Overflow