Minimum Time Required

  • + 0 comments

    JS Solution

    If minDays += 1 is used instead of minDays = midDays + 1, the time limit is exceeded because the algorithm degrades from a binary search into a linear scan: assigning minDays = midDays + 1 halves the remaining search interval on every iteration (O(log(range)) iterations), whereas incrementing by 1 advances the lower bound a single day at a time (O(range) iterations).

    /**
     * Returns the minimum number of days needed for the given machines to
     * produce at least `goal` items, where machines[i] takes machines[i]
     * days to produce one item.
     *
     * Production after d days, sum(floor(d / machines[i])), is monotonically
     * non-decreasing in d, so we binary-search for the smallest feasible d.
     *
     * @param {number[]} machines - days each machine needs per item
     * @param {number} goal - total number of items required
     * @returns {number} minimum days to reach the goal (0 if goal <= 0 or
     *   there are no machines, matching the original's degenerate behavior)
     */
    function minTime(machines, goal) {
      // Guard degenerate inputs explicitly instead of relying on
      // Math.max()/Math.min() of an empty list producing +/-Infinity.
      if (goal <= 0 || machines.length === 0) return 0;

      let minDays = 1;
      // The fastest machine working alone reaches the goal in this many
      // days, so this is a valid — and much tighter — upper bound than
      // Math.max(...machines) * goal.
      let maxDays = Math.min(...machines) * goal;

      while (minDays < maxDays) {
        const midDays = Math.floor((minDays + maxDays) / 2);
        let totalItems = 0;

        for (const machine of machines) {
          totalItems += Math.floor(midDays / machine);
        }

        if (totalItems < goal) {
          // midDays is infeasible; the answer lies strictly above it.
          minDays = midDays + 1;
        } else {
          // midDays is feasible; it may itself be the answer, so keep it.
          maxDays = midDays;
        }
      }
      return minDays;
    }
    
    // console.log(minTime([2, 3, 2], 10)); // 8
    // console.log(minTime([2, 3], 5)); // 6
    // console.log(minTime([1, 3, 4], 10)); // 7
    // console.log(minTime([4, 5, 6], 12)); // 20