Source code (job #2938266)

User: apocal1ps13 (Stefan Oprea Antoniu)   Date: 11 November 2022, 21:36:00
Problem: Heavy Path Decomposition   Score: 100
Compiler: cpp-64   Status: done
Round: Arhiva educationala (Educational Archive)   Size: 3.1 kb
#include<iostream>
#include<fstream>
#include<queue>
#include<algorithm>
#include<vector>
const int INF = 2e9;
const int NMAX = 1e5;
const int LOG = 18;
std::ifstream in("heavypath.in");
std::ofstream out("heavypath.out");
using namespace std;
int n, m, task, u, v, counter, ans;
vector<int> cost, aint, parent, sz, depth, heavyChild, HFindex; // aint = max segment tree; HFindex = position in heavy-first DFS order
vector<int> heavyhead; // head (topmost node) of the heavy chain the node belongs to
vector<int> g[NMAX + 1]; // adjacency lists

void set_fast() {
	ios_base::sync_with_stdio(false);
	in.tie(nullptr);
	out.tie(nullptr);
}

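// Point update in the max segment tree built over heavy-first DFS positions 1..n.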
void update(int pos, int val, int l = 1, int r = n, int node = 1) {
	if (l == r) {
		aint[node] = val;
		return;
	}
	int mid = (l + r) / 2;
	if (pos <= mid) update(pos, val, l, mid, node * 2);
	else update(pos, val, mid + 1, r, node * 2 + 1);
	aint[node] = max(aint[node * 2], aint[node * 2 + 1]);
}

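// First DFS: computes subtree sizes, depths and parents, and picks each node's
// heavy child (the child with the largest subtree); sz doubles as a visited marker.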
void dfs(int u) {
	sz[u] = 1;
	heavyChild[u] = 0;
	for (auto v : g[u]) {
		if (!sz[v]) {
			depth[v] = depth[u] + 1;
			parent[v] = u;
			dfs(v);
			sz[u] += sz[v];
			if (sz[heavyChild[u]] < sz[v]) heavyChild[u] = v;
		}
	}
}

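// Second DFS: visits the heavy child first, so every heavy chain occupies a
// contiguous range of HFindex values; each light child starts a new chain.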
void dfsHeavyFirst(int u) {
	counter++;
	HFindex[u] = counter;

	if (heavyChild[u]) {
		heavyhead[heavyChild[u]] = heavyhead[u];
		dfsHeavyFirst(heavyChild[u]);
	}
	for (auto v : g[u]) {
		if (sz[v] < sz[u] && v != heavyChild[u]) {
			heavyhead[v] = v;
			dfsHeavyFirst(v);
		}
	}
}

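// LCA by repeatedly lifting the node whose chain head is deeper to the parent
// of that head; not called from main() below.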
int lca(int u, int v) {
	if (heavyhead[u] == heavyhead[v]) {
		if (depth[u] < depth[v]) return u;
		return v;
	}
	else {
		if (depth[heavyhead[u]] > depth[heavyhead[v]]) {
			return lca(parent[heavyhead[u]], v);
		}
		else return lca(u, parent[heavyhead[v]]);
	}
}

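// Sets a node's stored weight: HFindex maps the tree node to its segment-tree leaf.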
void updateweight(int node, int weight) {
	update(HFindex[node], weight);
}

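// Range maximum on [st, dr] in heavy-first order; the result accumulates in the global ans.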
void queryMAX(int st, int dr, int l = 1, int r = n, int node = 1) {
	if (st <= l && r <= dr) {
		ans = max(ans, aint[node]);
		return;
	}
	int mid = (l + r) / 2;
	if (st <= mid) queryMAX(st, dr, l, mid, node * 2);
	if (mid < dr) queryMAX(st, dr, mid + 1, r, node * 2 + 1);
}

int query(int st, int dr) {
	ans = -INF;
	queryMAX(st, dr);
	return ans;
}

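// Maximum value on the u-v path: while u and v lie on different heavy chains,
// query the segment from the deeper chain's head to the node and jump above that
// head; once both nodes share a chain, one contiguous query finishes the path.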
int maximumOnPath(int u, int v) {
	if (heavyhead[u] == heavyhead[v]) {
		if (depth[u] < depth[v]) {
			return query(HFindex[u], HFindex[v]);
		}
		else return query(HFindex[v], HFindex[u]);
	}
	else {
		if (depth[heavyhead[u]] > depth[heavyhead[v]]) {
			return max(query(HFindex[heavyhead[u]], HFindex[u]), maximumOnPath(parent[heavyhead[u]], v));
		}
		else return max(query(HFindex[heavyhead[v]], HFindex[v]), maximumOnPath(u, parent[heavyhead[v]]));
	}
}

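// Input (heavypath.in): n m, then n node values, then n-1 edges, then m queries
// "task u v" where task 0 sets node u's value to v and task 1 asks for the
// maximum value on the path between u and v.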
int main() {
	set_fast();
	in >> n >> m;
	cost = parent = sz = depth = heavyChild = HFindex = heavyhead = vector<int>(n + 1);
	aint = vector<int>(4 * n + 1);
	heavyhead[1] = 1;

	for (int i = 1; i <= n; i++) in >> cost[i];
	for (int i = 1; i < n; i++) {
		in >> u >> v;
		g[u].push_back(v);
		g[v].push_back(u);
	}
	dfs(1);
	dfsHeavyFirst(1);

	for (int i = 1; i <= n; i++) updateweight(i, cost[i]);

	while (m--) {
		in >> task >> u >> v;
		if (!task) updateweight(u, v);
		else out << maximumOnPath(u, v) << '\n';
	}
	return 0;
}
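
// Illustrative example (not part of the original submission), following the
// input format read in main():
//
//   heavypath.in          heavypath.out
//   4 3                   5
//   1 5 3 2               10
//   1 2
//   2 3
//   2 4
//   1 3 4    -> max on path 3-2-4 = max(3, 5, 2) = 5
//   0 2 10   -> node 2's value becomes 10
//   1 1 3    -> max on path 1-2-3 = max(1, 10, 3) = 10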