#include <iostream>
#include <cstdio>
#include <cstring>
#include <algorithm>
#include <vector>
#define MAXN 5010
using namespace std;
// Adjacency matrix of the *currently active* edge set; edges are toggled
// on and off here while trial re-attachments are evaluated.
bool G[MAXN][MAXN];
// Raw input edge: endpoints u, v and weight w.
struct node {
int u, v, w;
} edge[MAXN];
// Adjacency-list entry: neighbor `to` reached by an edge of weight `w`.
struct node2 {
int to, w;
};
// Adjacency lists. NOTE: entries may be stale (edge turned off in G);
// the DFS routines filter them with a G[u][v] check.
vector<node2> G2[MAXN];
// child[u] = children of u in the tree rooted at node 1 (filled by dfs1).
vector<int> child[MAXN];
// Fast scanner: skips non-digits (a '-' seen before the digits negates the
// result), then parses a decimal integer.
// FIX: `ch` was a plain char, so EOF (-1) was narrowed and then handed to
// isdigit(), which is undefined behavior for negative values other than EOF
// and made the skip loop spin forever on end of input.  Keep `ch` as int and
// bail out of both loops once EOF is reached; returns 0 on empty input.
int read() {
    int x = 0, f = 1;
    int ch = getchar();                    // int: EOF is representable
    while (ch != EOF && !isdigit(ch)) {
        if (ch == '-') f = -1;
        ch = getchar();
    }
    while (ch != EOF && isdigit(ch)) {
        x = x * 10 + (ch - '0');
        ch = getchar();
    }
    return x * f;
}
// n      : number of nodes
// d[u]   : longest downward path starting at u (computed by dfs2)
// f[]    : cleared in init_dfs2 but never read elsewhere in this file
// ans    : diameter found by the most recent dfs2 run
// cnt    : best (minimum) diameter over all trial re-attachments
// fa[u]  : parent of u in the tree rooted at 1; W[u]: weight of edge (u, fa[u])
// dfs[u] : preorder timestamp of u; eid: running preorder counter
int n, d[MAXN], f[MAXN], ans, cnt, fa[MAXN], W[MAXN], dfs[MAXN], eid;
// cleared in init_dfs2 but never read elsewhere in this file
bool used[MAXN];
void dfs1(int u, int v) {
dfs[u] = ++eid;
for (int i = 0; i < G2[u].size(); i++) {
node2 p = G2[u][i];
if (v == p.to || !G[u][p.to]) continue;
child[u].push_back(p.to);
fa[p.to] = u;
W[p.to] = p.w;
dfs1(p.to, u);
}
}
void init_dfs2() {
for (int i = 0; i <= n + 1; i++) {
used[i] = false;
d[i] = f[i] = 0;
}
ans = 0;
}
void dfs2(int u, int v) {
for (int i = 0; i < G2[u].size(); i++) {
node2 p = G2[u][i];
if (v == p.to || !G[u][p.to]) continue;
dfs2(p.to, u);
ans = max(ans, d[u] + d[p.to] + p.w);
d[u] = max(d[u], d[p.to] + p.w);
}
}
// For every node with at most one child, cut its parent/child edge and try
// every legal re-attachment point, recomputing the diameter each time; print
// the minimum diameter achievable.
int main() {
    n = read();
    // Read the n-1 weighted tree edges; G marks currently-active edges.
    for (int i = 1; i < n; i++) {
        edge[i].u = read(); edge[i].v = read(); edge[i].w = read();
        G2[edge[i].u].push_back((node2){edge[i].v, edge[i].w});
        G2[edge[i].v].push_back((node2){edge[i].u, edge[i].w});
        G[edge[i].u][edge[i].v] = G[edge[i].v][edge[i].u] = true;
    }
    dfs1(1, -1);            // root at 1: fills fa/W/child and preorder dfs[]
    dfs2(1, -1);            // diameter of the original tree
    cnt = ans;
    for (int i = 1; i <= n; i++) {
        if (child[i].size() >= 2) continue;
        if (child[i].size() == 0) {
            // i is a leaf: cut edge (i, fa[i]) and try re-attaching i to every
            // other node j, keeping the original edge weight W[i].
            G[i][fa[i]] = G[fa[i]][i] = false;
            for (int j = 1; j <= n; j++) {
                if (j != i && j != fa[i] && !G[i][j]) {
                    G[i][j] = G[j][i] = true;
                    G2[i].push_back((node2){j, W[i]});
                    G2[j].push_back((node2){i, W[i]});
                    init_dfs2();
                    dfs2(1, -1);
                    cnt = min(cnt, ans);
                    G[i][j] = G[j][i] = false;
                    // FIX: undo the trial edge in the adjacency lists too.
                    // Originally only G was reverted, so G2 grew by two stale
                    // entries per trial; every later dfs2 then re-scanned
                    // O(n^2) dead entries.  That accumulation -- not the
                    // constant factor of the O(n^3) approach -- is what turned
                    // the samples-AC program into an all-points TLE.
                    G2[i].pop_back();
                    G2[j].pop_back();
                }
            }
            G[fa[i]][i] = G[i][fa[i]] = true;
        }
        if (child[i].size() == 1 && fa[i] != 0) {
            // i has exactly one child: cut edge (i, Child) and try re-attaching
            // Child to any node j outside Child's subtree (a preorder index
            // smaller than dfs[Child] guarantees j is not in that subtree,
            // so the graph stays a connected tree).
            int Child = child[i][0];
            G[i][Child] = G[Child][i] = false;
            for (int j = 1; j <= n; j++) {
                if (j != Child && j != i && !G[j][Child] && dfs[j] < dfs[Child]) {
                    G[j][Child] = G[Child][j] = true;
                    G2[Child].push_back((node2){j, W[Child]});
                    G2[j].push_back((node2){Child, W[Child]});
                    init_dfs2();
                    dfs2(1, -1);
                    cnt = min(cnt, ans);
                    G[j][Child] = G[Child][j] = false;
                    G2[Child].pop_back();   // FIX: same cleanup as the leaf case
                    G2[j].pop_back();
                }
            }
            G[Child][i] = G[i][Child] = true;
        }
    }
    printf("%d\n", cnt);
    return 0;
}
写这段代码是想检验一下这道题的数据,但为什么样例 AC 了,测试点却全部 TLE 了?
可能是这个 O(n^3) 做法的常数偏大?