Worked on a partial-order (dominance counting) problem today and realized what an idiot this blogger has been.

The old version:
/**************************************************************
    Problem: 3262
    User: MiEcoku
    Language: C++
    Result: Accepted
    Time: 3416 ms
    Memory: 6296 kb
****************************************************************/
#include <cstdio>
#include <algorithm>
using namespace std;
#define mid ((l + r) >> 1)
#define lowbit(x) (x & -x)

struct GG {
    int x, y, z, id, w;
    void get() { scanf("%d%d%d", &x, &y, &z); }
} a[100005], b[100005];

int dis[200005], k;      // BIT over the third coordinate, values in [1, k]
int _, n, cnt;           // _ = number of input triples, n = count after merging duplicates

int query(int x) {
    int ans = 0;
    for ( ; x; x -= lowbit(x)) ans += dis[x];
    return ans;
}
void add(int x, int y) {
    for ( ; x <= k; x += lowbit(x)) dis[x] += y;
}
bool cmp1(GG a, GG b) {   // order by x, then y, then z
    return a.x < b.x || (a.x == b.x && a.y < b.y) || (a.x == b.x && a.y == b.y && a.z < b.z);
}
bool cmp2(GG a, GG b) {   // order by y, then z
    return a.y < b.y || (a.y == b.y && a.z < b.z);
}
int ans[100005];
void cdq(int l, int r) {
    if (l == r) return;
    cdq(l, mid), cdq(mid + 1, r);
    sort(b + l, b + mid + 1, cmp2);        // re-sort both halves by y ...
    sort(b + mid + 1, b + r + 1, cmp2);
    int q = mid + 1;
    for (int i = l; i <= mid; i++) {       // ... then sweep: the left half goes into the BIT,
        while (q <= r && b[q].y < b[i].y)  // the right half queries it
            ans[b[q].id] += query(b[q].z), q++;
        add(b[i].z, b[i].w);
    }
    while (q <= r) ans[b[q].id] += query(b[q].z), q++;
    for (int i = l; i <= mid; i++) add(b[i].z, -b[i].w);   // undo the insertions
}
int d[100005];
int main() {
    scanf("%d%d", &_, &k);
    for (int i = 1; i <= _; i++) a[i].get();
    sort(a + 1, a + 1 + _, cmp1);
    for (int i = 1; i <= _; i++) {         // merge identical triples, remember the multiplicity in w
        cnt++;
        if (a[i].x != a[i+1].x || a[i].y != a[i+1].y || a[i].z != a[i+1].z)
            b[++n] = a[i], b[n].w = cnt, cnt = 0, b[n].id = n;
    }
    cdq(1, n);
    for (int i = 1; i <= n; i++) d[ans[b[i].id] + b[i].w - 1] += b[i].w;
    for (int i = 0; i < _; i++) printf("%d\n", d[i]);
}
You can see this is an O(n log² n) algorithm, and it is also the version I have seen in most places — so like a fool I assumed CDQ divide and conquer simply is an O(n log² n) algorithm.
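Where do the two logs come from? A quick back-of-the-envelope bound (my own aside, not part of the original post): a cdq call on a segment of length n does an O(n log n) sort plus O(n) BIT operations at O(log n) each, so

    T(n) = 2 T(n/2) + O(n log n)  =>  T(n) = O(n log² n).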
Can it actually be made faster?
void cdq(int l, int r) {
    if (l == r) return;
    cdq(l, mid), cdq(mid + 1, r);
    sort(b + l, b + mid + 1, cmp2);
    sort(b + mid + 1, b + r + 1, cmp2);
    int q = mid + 1;
    for (int i = l; i <= mid; i++) {
        while (q <= r && b[q].y < b[i].y)
            ans[b[q].id] += query(b[q].z), q++;
        add(b[i].z, b[i].w);
    }
    while (q <= r) ans[b[q].id] += query(b[q].z), q++;
    for (int i = l; i <= mid; i++) add(b[i].z, -b[i].w);
}
Notice that cdq sorts inside itself. Is that really necessary? The answer is no.
We all know merge sort, and the internal sort here does nothing more than order the elements by the second coordinate.
But after the two recursive calls both halves are already sorted by y, so the sweeping loop can merge them by itself and the explicit sort can be dropped, as sketched below.
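Stripped of all the BIT bookkeeping, that sweep is nothing but the merge step of merge sort. A fragment-only sketch (it assumes b, T, l, mid and r exactly as in the code that follows):

    // merge b[l..mid] and b[mid+1..r], both already sorted by y, into T
    int q = mid + 1, cnt = 0;
    for (int i = l; i <= mid; i++) {
        while (q <= r && b[q].y < b[i].y) T[++cnt] = b[q++];  // smaller y on the right: take it first
        T[++cnt] = b[i];                                      // then the left element
    }
    while (q <= r) T[++cnt] = b[q++];                         // leftovers on the right
    cnt = 0;
    for (int i = l; i <= r; ++i) b[i] = T[++cnt];             // copy back: b[l..r] is now sorted by y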
So we open up a new temporary array:
void cdq(int l, int r) {
    if (l == r) return;
    cdq(l, mid), cdq(mid + 1, r);
    int q = mid + 1, cnt = 0;
    for (int i = l; i <= mid; i++) {
        while (q <= r && b[q].y < b[i].y)
            ans[b[q].id] += query(b[q].z), T[++cnt] = b[q], q++;
        add(b[i].z, b[i].w);
        T[++cnt] = b[i];
    }
    while (q <= r) ans[b[q].id] += query(b[q].z), T[++cnt] = b[q], q++;
    for (int i = l; i <= mid; i++) add(b[i].z, -b[i].w);
    cnt = 0;
    for (int i = l; i <= r; ++i) b[i] = T[++cnt];   // copy the merge back: b[l..r] sorted by y
}
This removes the sort inside cdq entirely: the merging part of each level is now linear, exactly as in merge sort. (Strictly speaking the BIT still contributes its own log, so the total stays O(n log² n), but the sorting overhead is gone and the constant drops a lot.)
Can it go even faster?
Yes!
for ( int i = l; i <= mid; i ++) add(b[i].z, -b[i].w);
Can we optimize this line away as well?
Consider introducing a timestamp tim:
int query(int x, int K) {
    int ans = 0;
    for ( ; x; x -= lowbit(x))
        if (mark[x] == K) ans += dis[x];   // only entries written in round K count
    return ans;
}
void add(int x, int y, int K) {
    for ( ; x <= k; x += lowbit(x))
        if (mark[x] == K) dis[x] += y;
        else mark[x] = K, dis[x] = y;      // stale entry from an older round: just overwrite it
}
Then every BIT update is a plain point write: a node whose mark belongs to an older round simply does not count and gets overwritten, so nothing ever has to be subtracted back out.
That trims the constant factor down once more.
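Here is a tiny standalone demo of the lazy-reset trick (my own toy example, not from the solution itself): bumping the round number makes everything written in the previous round invisible, with zero clearing work.

    #include <cstdio>
    #define lowbit(x) (x & -x)

    int dis[16], mark[16], k = 8;        // a tiny BIT just for the demo

    int query(int x, int K) {
        int ans = 0;
        for ( ; x; x -= lowbit(x)) if (mark[x] == K) ans += dis[x];
        return ans;
    }
    void add(int x, int y, int K) {
        for ( ; x <= k; x += lowbit(x))
            if (mark[x] == K) dis[x] += y;
            else mark[x] = K, dis[x] = y;   // stale slot: overwrite instead of clearing
    }

    int main() {
        int tim = 1;
        add(3, 5, tim);
        printf("%d\n", query(4, tim));   // prints 5: the round-1 insertion is visible
        ++tim;                           // start a new round, nothing is cleared
        printf("%d\n", query(4, tim));   // prints 0: round-1 entries no longer count
    }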
The full code:
/**************************************************************
    Problem: 3262
    User: MiEcoku
    Language: C++
    Result: Accepted
    Time: 1428 ms
    Memory: 9028 kb
****************************************************************/
#include <cstdio>
#include <algorithm>
using namespace std;
#define mid ((l + r) >> 1)
#define lowbit(x) (x & -x)

struct GG {
    int x, y, z, id, w;
    void get() { scanf("%d%d%d", &x, &y, &z); }
} a[100005], b[100005], T[100005];

int dis[200005], k, tim, mark[200005];   // BIT with a round mark per node
int _, n, cnt;                           // _ = number of input triples, n = count after merging duplicates

int query(int x, int K) {
    int ans = 0;
    for ( ; x; x -= lowbit(x))
        if (mark[x] == K) ans += dis[x];  // only entries written in round K count
    return ans;
}
void add(int x, int y, int K) {
    for ( ; x <= k; x += lowbit(x))
        if (mark[x] == K) dis[x] += y;
        else mark[x] = K, dis[x] = y;     // stale entry from an older round: just overwrite it
}
bool cmp1(GG a, GG b) {
    return a.x < b.x || (a.x == b.x && a.y < b.y) || (a.x == b.x && a.y == b.y && a.z < b.z);
}
bool cmp2(GG a, GG b) {
    return a.y < b.y || (a.y == b.y && a.z < b.z);
}
int ans[100005];
void cdq(int l, int r) {
    if (l == r) return;
    cdq(l, mid), cdq(mid + 1, r);
    int q = mid + 1, cnt = 0;
    ++tim;                                // new round: everything in the BIT becomes stale for free
    for (int i = l; i <= mid; i++) {
        while (q <= r && b[q].y < b[i].y)
            ans[b[q].id] += query(b[q].z, tim), T[++cnt] = b[q], q++;
        add(b[i].z, b[i].w, tim);
        T[++cnt] = b[i];
    }
    while (q <= r) ans[b[q].id] += query(b[q].z, tim), T[++cnt] = b[q], q++;
    cnt = 0;
    for (int i = l; i <= r; ++i) b[i] = T[++cnt];   // copy the merge back: b[l..r] sorted by y
}
int d[100005];
int main() {
    scanf("%d%d", &_, &k);
    for (int i = 1; i <= _; i++) a[i].get();
    sort(a + 1, a + 1 + _, cmp1);
    for (int i = 1; i <= _; i++) {        // merge identical triples, keep the multiplicity in w
        cnt++;
        if (a[i].x != a[i+1].x || a[i].y != a[i+1].y || a[i].z != a[i+1].z)
            b[++n] = a[i], b[n].w = cnt, cnt = 0, b[n].id = n;
    }
    cdq(1, n);
    for (int i = 1; i <= n; i++) d[ans[b[i].id] + b[i].w - 1] += b[i].w;
    for (int i = 0; i < _; i++) printf("%d\n", d[i]);
}
Because this blogger was too dumb to know this trick, the KD-tree I wrote in the exam got killed on time.