#include <iostream>
#include <cstdio>
#include <cstring>
#include <algorithm>
template <class T>
inline void read(T &x) {
x = 0;
int f = 0;
char ch = getchar();
while (!isdigit(ch)) { f |= ch == '-'; ch = getchar(); }
while (isdigit(ch)) { x = (x << 1) + (x << 3) + (ch ^ 48); ch = getchar(); }
x = f ? -x : x;
return ;
}
typedef unsigned long long uLL;
typedef long long LL;
int a[100010], f[100010];
int n, ans;
int main() {
read(n);
for (int i = 1; i <= n; ++i) read(a[i]);
for (int i = 1; i <= n; ++i) {
f[i] = 1;
for (int j = std::max(1, i - 4); j < i; ++j) {
if (a[i] & a[j]) {
f[i] = std::max(f[i], f[j] + 1);
}
}
ans = std::max(ans, f[i]);
}
printf("%d\n", ans);
return 0;
}
这份显然错误的代码都能过
讨论区里还有个更离谱的 O(n) 做法都能过