JS Solution
If `minDays += 1` is used instead of `minDays = midDays + 1`, the time complexity exceeds the limit. Can anyone explain why?
```js
function minTime(machines, goal) {
    let minDays = 0;
    let maxDays = Math.max(...machines) * goal;

    // Binary search over the number of days
    while (minDays < maxDays) {
        let midDays = Math.floor((minDays + maxDays) / 2);

        // Count how many items all machines produce within midDays days
        let totalItems = 0;
        for (let machine of machines) {
            totalItems += Math.floor(midDays / machine);
        }

        if (totalItems < goal) {
            minDays = midDays + 1; // not enough items: search the upper half
        } else {
            maxDays = midDays;     // enough items: midDays could still be the answer
        }
    }

    return minDays;
}

// console.log(minTime([2, 3, 2], 10)); // 8
// console.log(minTime([2, 3], 5)); // 6
// console.log(minTime([1, 3, 4], 10)); // 7
// console.log(minTime([4, 5, 6], 12)); // 20
```
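For comparison, here is a sketch of the `minDays += 1` variant the question describes (my own illustration, not part of the original solution; the name `minTimeSlow` is just for this example). Whenever `totalItems < goal`, the lower bound now creeps up by a single day instead of jumping past `midDays`, so the search range can shrink by only 1 per iteration. That turns roughly `log2(maxDays)` iterations into up to `maxDays` iterations, and `maxDays = Math.max(...machines) * goal` can be enormous for large inputs, which is presumably why it exceeds the time limit.

```js
// Hypothetical slow variant for comparison only: the single change is
// `minDays += 1` in place of `minDays = midDays + 1`.
function minTimeSlow(machines, goal) {
    let minDays = 0;
    let maxDays = Math.max(...machines) * goal;

    while (minDays < maxDays) {
        let midDays = Math.floor((minDays + maxDays) / 2);
        let totalItems = 0;
        for (let machine of machines) {
            totalItems += Math.floor(midDays / machine);
        }

        if (totalItems < goal) {
            // Still correct (minDays never overshoots the answer), but the range
            // shrinks by only one day here, so the loop is effectively linear
            // in maxDays rather than logarithmic.
            minDays += 1;
        } else {
            maxDays = midDays;
        }
    }

    return minDays;
}
```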