| comments | difficulty | edit_url | tags |
| --- | --- | --- | --- |
| true | Easy | | |
We have two special characters:

- The first character can be represented by one bit `0`.
- The second character can be represented by two bits (`10` or `11`).
Given a binary array `bits` that ends with `0`, return `true` if the last character must be a one-bit character.
Example 1:
Input: bits = [1,0,0] Output: true Explanation: The only way to decode it is two-bit character and one-bit character. So the last character is one-bit character.
Example 2:
Input: bits = [1,1,1,0] Output: false Explanation: The only way to decode it is two-bit character and two-bit character. So the last character is not one-bit character.
Constraints:
1 <= bits.length <= 1000
`bits[i]` is either `0` or `1`.
class Solution:
def isOneBitCharacter(self, bits: List[int]) -> bool:
i, n = 0, len(bits)
while i < n - 1:
i += bits[i] + 1
return i == n - 1
class Solution {
    /**
     * Determines whether the trailing 0 must be a one-bit character.
     *
     * <p>Decodes greedily from the left: a 1 begins a two-bit character,
     * a 0 is a one-bit character. The last bit stands alone exactly when
     * the scan stops on the final index instead of skipping over it.
     *
     * @param bits binary array guaranteed to end with 0
     * @return true if the last character is a one-bit character
     */
    public boolean isOneBitCharacter(int[] bits) {
        int last = bits.length - 1;
        int pos = 0;
        while (pos < last) {
            // A 1 consumes two bits; a 0 consumes one.
            pos += (bits[pos] == 1) ? 2 : 1;
        }
        return pos == last;
    }
}
class Solution {
public:
    // Returns true iff the trailing 0 must decode as a one-bit character.
    // Greedy left-to-right scan: a 1 starts a two-bit character (10/11),
    // a 0 is a one-bit character; the last bit stands alone exactly when
    // the scan lands on the final index.
    bool isOneBitCharacter(vector<int>& bits) {
        const int last = static_cast<int>(bits.size()) - 1;
        int pos = 0;
        while (pos < last) {
            // A 1 consumes two bits; a 0 consumes one.
            pos += bits[pos] ? 2 : 1;
        }
        return pos == last;
    }
};
// isOneBitCharacter reports whether the trailing 0 must decode as a
// one-bit character. It scans greedily from the left: a 1 begins a
// two-bit character (10 or 11) and a 0 is a one-bit character, so the
// last bit stands alone exactly when the scan stops on the final index.
func isOneBitCharacter(bits []int) bool {
	last := len(bits) - 1
	pos := 0
	for pos < last {
		if bits[pos] == 1 {
			// A 1 consumes two bits.
			pos += 2
		} else {
			// A 0 consumes one bit.
			pos++
		}
	}
	return pos == last
}
/**
 * Determine whether the trailing 0 must be a one-bit character.
 * Decodes greedily from the left: a 1 starts a two-bit character
 * (10 or 11), a 0 is a one-bit character; the last bit stands alone
 * exactly when the scan stops on the final index.
 * @param {number[]} bits
 * @return {boolean}
 */
var isOneBitCharacter = function (bits) {
    const last = bits.length - 1;
    let pos = 0;
    while (pos < last) {
        // A 1 consumes two bits; a 0 consumes one.
        pos += bits[pos] === 1 ? 2 : 1;
    }
    return pos === last;
};