PXC locking on large transactions
# Prep:
[root@node1 ~]# date; sysbench --test=sysbench_tests/db/oltp.lua \
  --mysql-host=node1 --mysql-user=test --mysql-db=test \
  --oltp-table-size=250000 --report-interval=1 --max-requests=0 \
  --tx-rate=10 prepare
node1> CREATE TABLE `other` (
  `i` int(10) unsigned NOT NULL AUTO_INCREMENT,
  `j` varchar(32) DEFAULT NULL,
  PRIMARY KEY (`i`)
) ENGINE=InnoDB;
node1> insert into other values (null, 'aaaaaaaa');
node1> insert into other (j) select j from other;
# until there are around 512k rows in 'other'
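# A minimal sketch of that doubling step (an assumption, not part of the original prep):
# starting from the single seed row, repeating the self-insert ~19 times gives 2^19 = 524288 rows.
[root@node1 ~]# for i in $(seq 1 19); do mysql test -e "insert into other (j) select j from other;"; done
[root@node1 ~]# mysql test -e "select count(*) from other;"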
node1> drop table if exists new; create table if not exists new like other;
# sysbench operates on the test.sbtest1 table
[root@node1 ~]# date; sysbench --test=sysbench_tests/db/oltp.lua \
  --mysql-host=node1 --mysql-user=test --mysql-db=test \
  --oltp-table-size=250000 --report-interval=1 --max-requests=0 \
  --tx-rate=10 run | grep tps
Wed Feb 13 16:40:05 CET 2013
[ 1s] threads: 1, tps: 12.06, reads/s: 168.89, writes/s: 48.25, response time: 62.77ms (95%)
[ 2s] threads: 1, tps: 17.00, reads/s: 238.01, writes/s: 68.00, response time: 33.76ms (95%)
[ 3s] threads: 1, tps: 10.00, reads/s: 140.00, writes/s: 40.00, response time: 39.98ms (95%)
[ 4s] threads: 1, tps: 13.00, reads/s: 181.99, writes/s: 52.00, response time: 23.83ms (95%)
[ 5s] threads: 1, tps: 11.00, reads/s: 154.04, writes/s: 44.01, response time: 40.89ms (95%)
[ 6s] threads: 1, tps: 6.00, reads/s: 83.98, writes/s: 24.00, response time: 19.75ms (95%)
[ 7s] threads: 1, tps: 12.00, reads/s: 168.01, writes/s: 48.00, response time: 18.21ms (95%)
[ 8s] threads: 1, tps: 12.00, reads/s: 168.00, writes/s: 48.00, response time: 18.42ms (95%)
[ 9s] threads: 1, tps: 9.00, reads/s: 140.00, writes/s: 40.00, response time: 18.89ms (95%)
[ 10s] threads: 1, tps: 11.99, reads/s: 153.87, writes/s: 43.96, response time: 15.73ms (95%)
[ 11s] threads: 1, tps: 15.01, reads/s: 210.19, writes/s: 60.05, response time: 16.02ms (95%)
[ 12s] threads: 1, tps: 9.00, reads/s: 126.00, writes/s: 36.00, response time: 48.35ms (95%)
[ 13s] threads: 1, tps: 12.00, reads/s: 168.00, writes/s: 48.00, response time: 20.25ms (95%)
[ 14s] threads: 1, tps: 17.00, reads/s: 238.01, writes/s: 68.00, response time: 28.73ms (95%)
[ 15s] threads: 1, tps: 10.00, reads/s: 139.99, writes/s: 40.00, response time: 21.90ms (95%)
[ 16s] threads: 1, tps: 10.00, reads/s: 140.00, writes/s: 40.00, response time: 20.92ms (95%)
[ 17s] threads: 1, tps: 13.00, reads/s: 182.00, writes/s: 52.00, response time: 16.59ms (95%)
[ 18s] threads: 1, tps: 8.99, reads/s: 125.89, writes/s: 35.97, response time: 25.52ms (95%)
[ 19s] threads: 1, tps: 5.00, reads/s: 70.05, writes/s: 20.01, response time: 29.92ms (95%)
[ 20s] threads: 1, tps: 6.00, reads/s: 98.01, writes/s: 28.00, response time: 174.41ms (95%)
[ 21s] threads: 1, tps: 4.00, reads/s: 56.00, writes/s: 16.00, response time: 443.51ms (95%)
[ 22s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 23s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 24s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 25s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 26s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 27s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 28s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 29s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 30s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 31s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 32s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 33s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 34s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 35s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 36s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 37s] threads: 1, tps: 0.00, reads/s: 0.00, writes/s: 0.00, response time: 0.00ms (95%)
[ 38s] threads: 1, tps: 32.00, reads/s: 434.01, writes/s: 124.00, response time: 17056.20ms (95%)
[ 39s] threads: 1, tps: 63.00, reads/s: 883.03, writes/s: 252.01, response time: 13059.34ms (95%)
[ 40s] threads: 1, tps: 65.99, reads/s: 936.80, writes/s: 267.94, response time: 8327.78ms (95%)
[ 41s] threads: 1, tps: 53.01, reads/s: 728.08, writes/s: 208.02, response time: 3777.42ms (95%)
# Started at the same time as the sysbench run above; sleeps 5 seconds and then executes the big trx
[root@node2 ~]# sleep 5; date; mysql test -e "insert into new select * from other;"; date
Wed Feb 13 16:40:10 CET 2013
Wed Feb 13 16:40:34 CET 2013
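# That single statement replicates as one ~16M writeset (visible in the Bytes columns below).
# A rough way to gauge the writeset size and the configured ceiling before running such a
# statement (an assumed check, not captured in the original test):
node2> select count(*), sum(length(j)) as approx_data_bytes from other;
node2> show global variables like 'wsrep_max_ws_size';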
# Node1's myq_status -t 1 wsrep
Wsrep Cluster Node Queue Ops Bytes Flow Conflct
time name P cnf # name cmt sta Up Dn Up Dn Up Dn pau snt dst lcf bfa
16:40:05 trime P 6 2 node1 Sync T/T 0 0 10 0 13K 0 0.0 0 77 0 0
16:40:06 trime P 6 2 node1 Sync T/T 0 0 13 0 18K 0 0.0 0 82 0 0
16:40:07 trime P 6 2 node1 Sync T/T 0 0 15 0 20K 0 0.0 0 88 0 0
16:40:09 trime P 6 2 node1 Sync T/T 0 0 13 0 19K 0 0.0 0 93 0 0
16:40:10 trime P 6 2 node1 Sync T/T 0 0 12 0 17K 0 0.0 0 97 0 0
16:40:11 trime P 6 2 node1 Sync T/T 0 0 6 0 8.4K 0 0.0 0 100 0 0
16:40:12 trime P 6 2 node1 Sync T/T 0 0 13 0 20K 0 0.0 0 103 0 0
16:40:13 trime P 6 2 node1 Sync T/T 0 0 12 0 17K 0 0.0 0 108 0 0
16:40:14 trime P 6 2 node1 Sync T/T 0 0 12 0 17K 0 0.0 0 112 0 0
16:40:15 trime P 6 2 node1 Sync T/T 0 0 11 0 15K 0 0.0 0 116 0 0
16:40:16 trime P 6 2 node1 Sync T/T 0 0 13 0 19K 0 0.0 0 122 0 0
16:40:17 trime P 6 2 node1 Sync T/T 0 0 14 0 21K 0 0.0 0 124 0 0
16:40:18 trime P 6 2 node1 Sync T/T 0 0 13 0 17K 0 0.0 0 128 0 0
16:40:19 trime P 6 2 node1 Sync T/T 0 0 13 0 19K 0 0.0 0 133 0 0
Wsrep Cluster Node Queue Ops Bytes Flow Conflct
time name P cnf # name cmt sta Up Dn Up Dn Up Dn pau snt dst lcf bfa
16:40:20 trime P 6 2 node1 Sync T/T 0 0 10 0 13K 0 0.0 0 137 0 0
16:40:21 trime P 6 2 node1 Sync T/T 0 0 15 0 22K 0 0.0 0 143 0 0
16:40:22 trime P 6 2 node1 Sync T/T 0 0 14 0 21K 0 0.0 0 147 0 0
16:40:23 trime P 6 2 node1 Sync T/T 0 0 5 0 6.8K 0 0.0 0 148 0 0
16:40:24 trime P 6 2 node1 Sync T/T 0 0 5 0 7.5K 0 0.0 0 150 0 0
16:40:26 trime P 6 2 node1 Sync T/T 0 0 6 2 9.3K 16M 0.0 0 151 0 0
16:40:35 trime P 6 2 node1 Sync T/T 0 0 0 1 0 8.0 0.0 0 250 0 0
16:40:36 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:38 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:39 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:40 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:41 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:42 trime P 6 2 node1 Sync T/T 0 0 12 1 17K 8.0 0.0 0 78 0 0
16:40:43 trime P 6 2 node1 Sync T/T 0 0 67 0 96K 0 0.0 0 52 0 0
16:40:44 trime P 6 2 node1 Sync T/T 0 0 73 0 105K 0 0.0 0 77 0 0
Wsrep Cluster Node Queue Ops Bytes Flow Conflct
time name P cnf # name cmt sta Up Dn Up Dn Up Dn pau snt dst lcf bfa
16:40:46 trime P 6 2 node1 Sync T/T 0 0 61 0 86K 0 0.0 0 100 0 0
16:40:47 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 100 0 0
16:40:48 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 100 0 0
16:40:49 trime P 6 2 node1 Sync T/T 0 0 0 0 0 0 0.0 0 100 0 0
# Node2's myq_status -t 1 wsrep
Wsrep Cluster Node Queue Ops Bytes Flow Conflct
time name P cnf # name cmt sta Up Dn Up Dn Up Dn pau snt dst lcf bfa
16:40:04 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 74 0 0
16:40:05 trime P 6 2 node2 Sync T/T 0 0 0 10 0 13K 0.0 0 77 0 0
16:40:06 trime P 6 2 node2 Sync T/T 0 0 0 13 0 18K 0.0 0 82 0 0
16:40:07 trime P 6 2 node2 Sync T/T 0 0 0 15 0 20K 0.0 0 88 0 0
16:40:08 trime P 6 2 node2 Sync T/T 0 0 0 13 0 19K 0.0 0 93 0 0
16:40:09 trime P 6 2 node2 Sync T/T 0 0 0 10 0 14K 0.0 0 96 0 0
16:40:11 trime P 6 2 node2 Sync T/T 0 0 0 8 0 11K 0.0 0 100 0 0
16:40:12 trime P 6 2 node2 Sync T/T 0 0 0 13 0 20K 0.0 0 103 0 0
16:40:13 trime P 6 2 node2 Sync T/T 0 0 0 11 0 17K 0.0 0 107 0 0
16:40:14 trime P 6 2 node2 Sync T/T 0 0 0 13 0 18K 0.0 0 112 0 0
16:40:15 trime P 6 2 node2 Sync T/T 0 0 0 12 0 17K 0.0 0 117 0 0
16:40:16 trime P 6 2 node2 Sync T/T 0 0 0 12 0 17K 0.0 0 122 0 0
16:40:17 trime P 6 2 node2 Sync T/T 0 0 0 14 0 21K 0.0 0 124 0 0
16:40:18 trime P 6 2 node2 Sync T/T 0 0 0 13 0 17K 0.0 0 128 0 0
16:40:19 trime P 6 2 node2 Sync T/T 0 0 0 13 0 19K 0.0 0 133 0 0
Wsrep Cluster Node Queue Ops Bytes Flow Conflct
time name P cnf # name cmt sta Up Dn Up Dn Up Dn pau snt dst lcf bfa
16:40:20 trime P 6 2 node2 Sync T/T 0 0 0 12 0 16K 0.0 0 139 0 0
16:40:21 trime P 6 2 node2 Sync T/T 0 0 0 15 0 22K 0.0 0 144 0 0
16:40:22 trime P 6 2 node2 Sync T/T 0 0 0 12 0 17K 0.0 0 147 0 0
16:40:24 trime P 6 2 node2 Sync T/T 1 0 0 8 0 11K 0.0 0 150 0 0
16:40:25 trime P 6 2 node2 Sync T/T 0 2 1 7 16M 11K 0.0 0 151 0 0
16:40:34 trime P 6 2 node2 Sync T/T 0 0 0 3 0 1.6K 0.0 0 250 0 0
16:40:35 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:36 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:37 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:39 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:40 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:41 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:42 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 250 0 0
16:40:43 trime P 6 2 node2 Sync T/T 0 0 0 41 0 57K 0.0 0 49 0 0
16:40:44 trime P 6 2 node2 Sync T/T 0 0 0 71 0 102K 0.0 0 60 0 0
Wsrep Cluster Node Queue Ops Bytes Flow Conflct
time name P cnf # name cmt sta Up Dn Up Dn Up Dn pau snt dst lcf bfa
16:40:45 trime P 6 2 node2 Sync T/T 0 0 0 89 0 125K 0.0 0 95 0 0
16:40:46 trime P 6 2 node2 Sync T/T 0 0 0 13 0 19K 0.0 0 100 0 0
16:40:47 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 100 0 0
16:40:48 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 100 0 0
16:40:49 trime P 6 2 node2 Sync T/T 0 0 0 0 0 0 0.0 0 100 0 0
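# The Ops/Bytes Up and Dn columns above appear to track per-second deltas of
# wsrep_replicated_bytes (sent) and wsrep_received_bytes (received). If myq_status is not
# available, a crude substitute (an assumption, not part of the original capture) is to poll
# those counters directly; note that SHOW GLOBAL STATUS itself hangs for several seconds
# while the big writeset applies, as the timing below shows:
[root@node1 ~]# while true; do date; mysql -e "show global status where Variable_name in ('wsrep_replicated_bytes','wsrep_received_bytes')"; sleep 1; done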
# Timing:
# 16:40:05 - sysbench starts on node1
# 16:40:10 - large trx starts on node2
# 16:40:25 - large trx shows in wsrep_replicated_bytes on node2; myq_status unable to get SHOW GLOBAL STATUS after this
# 16:40:26 - large trx shows in wsrep_received_bytes on node1; myq_status unable to get SHOW GLOBAL STATUS after this
# 16:40:27 - sysbench trx rate drops to 0 (22s into run)
# 16:40:34 - node2 registers completion of large trx
# 16:40:34 - node2's myq_status starts working again
# 16:40:35 - node1's myq_status starts working again
# 16:40:43 - sysbench trx rate recovers (38s into run)
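# One way to keep writesets small and avoid the roughly 16-second stall in sysbench commits
# above is to copy the table in chunks, so each statement commits (and replicates) as its own
# small transaction. This is a sketch under assumptions (chunk size of 10000, `i` values dense
# enough to chunk on), not something run in the original test:
[root@node2 ~]# max=$(mysql -N test -e "select max(i) from other")
[root@node2 ~]# for start in $(seq 0 10000 $max); do
>   mysql test -e "insert into new select * from other where i > $start and i <= $start + 10000"
> done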
# During the myq_status stall, I captured some SHOW FULL PROCESSLIST outputs
node1 mysql> show full processlist;
+--------+------------------+-------------+------+---------+--------+--------------------+--------------------------------------+-----------+---------------+-----------+
| Id | User | Host | db | Command | Time | State | Info | Rows_sent | Rows_examined | Rows_read |
+--------+------------------+-------------+------+---------+--------+--------------------+--------------------------------------+-----------+---------------+-----------+
| 1 | system user | | NULL | Sleep | 102 | committed 42727 | NULL | 0 | 0 | 0 |
| 2 | system user | | NULL | Sleep | 405305 | wsrep aborter idle | NULL | 0 | 0 | 0 |
| 3 | system user | | NULL | Sleep | 101 | committed 42728 | NULL | 0 | 0 | 0 |
| 400933 | root | localhost | NULL | Query | 0 | sleeping | show full processlist | 0 | 0 | 0 |
| 401034 | test | node1:48954 | test | Query | 3 | sleeping | COMMIT | 0 | 0 | 0 |
| 401077 | clustercheckuser | localhost | NULL | Query | 2 | executing | SHOW STATUS LIKE 'wsrep_local_state' | 0 | 0 | 0 |
| 401078 | clustercheckuser | localhost | NULL | Query | 2 | executing | SHOW STATUS LIKE 'wsrep_local_state' | 0 | 0 | 0 |
| 401079 | root | localhost | NULL | Query | 2 | executing | SHOW /*!50002 GLOBAL */ STATUS | 0 | 0 | 0 |
+--------+------------------+-------------+------+---------+--------+--------------------+--------------------------------------+-----------+---------------+-----------+
node1 mysql> show full processlist;
+--------+------------------+-------------+------+---------+--------+--------------------+--------------------------------------+-----------+---------------+-----------+
| Id | User | Host | db | Command | Time | State | Info | Rows_sent | Rows_examined | Rows_read |
+--------+------------------+-------------+------+---------+--------+--------------------+--------------------------------------+-----------+---------------+-----------+
| 1 | system user | | NULL | Sleep | 129 | committed 42727 | NULL | 0 | 0 | 0 |
| 2 | system user | | NULL | Sleep | 405332 | wsrep aborter idle | NULL | 0 | 0 | 0 |
| 3 | system user | | NULL | Sleep | 128 | committed 42728 | NULL | 0 | 0 | 0 |
| 400933 | root | localhost | NULL | Query | 0 | sleeping | show full processlist | 0 | 0 | 0 |
| 401034 | test | node1:48954 | test | Query | 5 | sleeping | COMMIT | 0 | 0 | 0 |
| 401137 | root | localhost | NULL | Query | 4 | closing tables | SHOW /*!50002 GLOBAL */ STATUS | 410 | 410 | 410 |
| 401138 | clustercheckuser | localhost | NULL | Query | 4 | closing tables | SHOW STATUS LIKE 'wsrep_local_state' | 1 | 1 | 1 |
| 401139 | clustercheckuser | localhost | NULL | Query | 4 | closing tables | SHOW STATUS LIKE 'wsrep_local_state' | 1 | 1 | 1 |
+--------+------------------+-------------+------+---------+--------+--------------------+--------------------------------------+-----------+---------------+-----------+
8 rows in set (0.76 sec)
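# Note that SHOW FULL PROCESSLIST still answers during the stall (Time 0 above) while the
# SHOW STATUS queries sit in "executing"/"closing tables" for several seconds, so a crude
# watch loop like this (an assumption, not part of the original capture) can track the stuck
# COMMIT and the applier "committed" states:
[root@node1 ~]# while true; do date; mysql -e "show full processlist" | grep -i commit; sleep 1; done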