打码五分钟,Debug四小时?玄学错误求助!
查看原帖
打码五分钟,Debug四小时?玄学错误求助!
59824
doughty楼主2020/8/18 22:21
#include<iostream>
#include<cstdio>
#define rg register
#define ll unsigned long long
using namespace std;
ll sum[50010],f[50010],q[50010];
int n,m,L;
double Y(int j){return f[j]+(sum[j]+j)*(sum[j]+j);}
double X(int j){return 2*(sum[j]+j);}
double G(int i){return i-1+sum[i]-L;}
double DG(int i){return G(i)*G(i);}
double T(int i,int j){return (Y(i)-Y(j))/((X(i)-X(j)));}
int main()
{
	scanf("%d%d",&n,&L);
	for(rg int i=1;i<=n;i++){
		int x;
		scanf("%d",&x);
		sum[i]=sum[i-1]+x;
	}
	int l=1,r=1;
	for(rg int i=1;i<=n;i++){
		while(l<r&&T(q[l],q[l+1])<=G(i)) l++;
		f[i]=Y(q[l])+DG(i)-G(i)*X(q[l]);
		while(l<r&&T(i,q[r-1])<=T(q[r-1],q[r]))r--;
		q[++r]=i;
	}
	printf("%lld",f[n]);
	return 0;

f[i]=min{f[j]+(ij1+sum[i]sum[j]L)2(i-j-1+sum[i]-sum[j]-L)^2} 通过展开得到一长串式子,再通过换元达到最终目的。这样做为什么不行?

2020/8/18 22:21
加载中...