set global slow_query_log='ON';
set global long_query_time=1;
show variables like 'slow_query%';
show variables like 'long_query_time';
- Now check the slow logs in the mysql.slow_log table
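Once log_output includes TABLE (set below), the slow queries can be read straight from that table, e.g.:
SELECT start_time, query_time, rows_examined, sql_text FROM mysql.slow_log ORDER BY start_time DESC LIMIT 10;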
SET GLOBAL general_log = 'ON';
SET GLOBAL log_output = 'TABLE';
show variables like '%general%';
- Now you can find the MySQL operation log in the mysql.general_log table
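A quick way to peek at the latest recorded statements (using the table's standard columns):
SELECT event_time, command_type, argument FROM mysql.general_log ORDER BY event_time DESC LIMIT 20;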
set global log_queries_not_using_indexes=ON;
- Those queries will also appear in mysql.slow_log
- In mysql.general_log, if the connection is reused, you should see the connect event only at the very beginning
mysql -uroot -ppwd < dumpfile
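The line above restores a SQL dump; the dump file itself is typically produced with mysqldump (db_test here is just an example database name):
mysqldump -uroot -ppwd db_test > dumpfile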
CREATE USER 'new_user'@'%' IDENTIFIED BY 'new_user_pwd';
GRANT ALL PRIVILEGES ON db_test.* TO 'new_user'@'%' WITH GRANT OPTION;
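To double-check what the new account is allowed to do:
SHOW GRANTS FOR 'new_user'@'%';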
- Here's a cheap way to get an estimated row count:
> select TABLE_ROWS FROM INFORMATION_SCHEMA.TABLES where TABLE_SCHEMA='your_db_name' and TABLE_NAME='your_table_name';
+------------+
| TABLE_ROWS |
+------------+
|      57889 |
+------------+
1 row in set (0.00 sec)
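Note that TABLE_ROWS is only an estimate for InnoDB tables (it comes from sampled statistics); if the number looks stale, refresh the statistics with:
ANALYZE TABLE your_table_name;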
limit offset, n -> where id > offset limit n
- Note: id is the auto-increment primary key and should be contiguous (that is, you should NOT delete rows)
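For example (using the records table from below and assuming an auto-increment id), the rewrite avoids scanning and discarding all of the skipped rows:
-- slow: reads and throws away the first 100000 rows
SELECT * FROM records ORDER BY id LIMIT 100000, 20;
-- fast: seeks straight into the id range
SELECT * FROM records WHERE id > 100000 ORDER BY id LIMIT 20;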
- COALESCE returns the first non-NULL value
select COALESCE(AVG(distance), 0) as baseline from records where uuid=?
- Use UNION to provide a default value when there is no matching row
select value from conf where entry = ? UNION (SELECT '' )
- Replace the value '' in the right-hand SELECT with your expected default value.
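For example, with a hypothetical default of '30' for a missing timeout entry (the fallback row is simply appended after any real match):
SELECT value FROM conf WHERE entry = 'timeout' UNION (SELECT '30');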
CASE is one of the Control Flow Functions.
Name | Description | Example |
---|---|---|
CASE | Case operator | see the examples below |
IF() | if/else construct | IF(m.title LIKE '%the%', 'THE MOVIE', NULL) AS msg |
IFNULL() | if expr is NULL, return the 2nd parameter | IFNULL(expression, alt_value), roughly like expression OR alt_value |
NULLIF() | return NULL if expr1 = expr2 | NULLIF(m.genre, 2) -- if it equals 2, return NULL, otherwise return m.genre |
- Dynamically replace column values:
SELECT m.movie_id, m.movie_title, CASE m.genre_id WHEN 1 THEN 'Cool Stuff' WHEN 2 THEN 'Futuristic' WHEN 123 THEN 'Something else' ELSE CONCAT('GENRE ID ', m.genre_id) END AS genre FROM movies AS m
- Another example:
WHERE m.p1 = ? OR m.p2 = ? ORDER BY CASE WHEN (m.p1 = ?) AND ((withdraw & 1) <> 0) THEN match_id - 10000000 WHEN (m.p2 = ?) AND ((withdraw & 2) <> 0) THEN match_id - 10000000 ELSE match_id END DESC LIMIT 20
For databases in different time zones, the stored date values are affected by the time zone, so the UNIX_TIMESTAMP function is not what we want...
SELECT TIMESTAMPDIFF( SECOND, "1970-01-01 00:00:00" , <TIMESTAMP created by MYSQL> );
select CONVERT_TZ( <timestamp column> , time_format(timediff( NOW(), UTC_TIMESTAMP) ,'+%H:%i') , '+08:00') from ...
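Combining the two snippets gives a time-zone-independent epoch value for a stored column (a sketch, assuming a created_at column and a server at a positive UTC offset, which is what the TIME_FORMAT trick above requires):
SELECT TIMESTAMPDIFF(SECOND, '1970-01-01 00:00:00',
       CONVERT_TZ(created_at, TIME_FORMAT(TIMEDIFF(NOW(), UTC_TIMESTAMP), '+%H:%i'), '+00:00'))
FROM records;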
... status=if(status="created","paid",status)
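As a complete statement, the same IF() trick flips a status in place while leaving rows in other states untouched (a sketch, assuming a hypothetical orders table):
UPDATE orders SET status = IF(status = 'created', 'paid', status) WHERE order_id = ?;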
- Query and return the rank within the query:
SELECT @rownum:=@rownum+1 AS 'rank', uuid, rank_score
FROM pvp_hsw, (SELECT @rownum:=0) AS r  -- every derived table must have its own alias
ORDER BY rank_score DESC;
- Here @rownum will safely reset every time, since it is re-initialized in the derived table
- Update the rank info while doing the query:
SET @rownum=0;
update pvp_hsw as p set week_rank=@rownum:=@rownum+1 order by rank_score desc;
- How to handle ties?
-- 90,80,80,70 will generate ranks 1,2,2,3
SELECT
  CASE  -- increase only if the score is different
    WHEN @rowscore = rank_score THEN @rownum
    ELSE @rownum:=@rownum+1
  END AS 'rank',
  uuid,
  @rowscore:=rank_score,  -- update the variable
  bonus_honor_points, bonus_coin, bonus_diamond
FROM pvp_hsw, (SELECT @rownum:=0, @rowscore:=0) AS r  -- every derived table must have its own alias
ORDER BY rank_score DESC;

-- 90,80,80,70 will generate ranks 1,2,2,4
SELECT
  @rownum:=@rownum+1 AS rowindex,
  CASE  -- lastrank increases only if the score is different
    WHEN @rowscore = rank_score THEN @lastrank
    ELSE @lastrank:=@rownum
  END AS 'rank',
  uuid,
  @rowscore:=rank_score,  -- update the variable
  bonus_honor_points, bonus_coin, bonus_diamond
FROM pvp_hsw, (SELECT @rownum:=0, @rowscore:=0, @lastrank:=1) AS r  -- every derived table must have its own alias
ORDER BY rank_score DESC;
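On MySQL 8.0+, window functions produce the same two numbering schemes without user variables (a sketch: RANK() yields 1,2,2,4 and DENSE_RANK() yields 1,2,2,3):
SELECT uuid, rank_score,
       RANK()       OVER (ORDER BY rank_score DESC) AS rnk,
       DENSE_RANK() OVER (ORDER BY rank_score DESC) AS dense_rnk
FROM pvp_hsw;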
// BulkInsert upserts the given rows into pvp_hsw with one multi-row INSERT ... ON DUPLICATE KEY UPDATE.
func BulkInsert(tx *sql.Tx, unsavedRows []PVP_HSW_t) error {
    // nothing to update
    if len(unsavedRows) == 0 {
        return nil
    }
    valueStrings := make([]string, 0, len(unsavedRows))
    valueArgs := make([]interface{}, 0, len(unsavedRows)*6) // 6 bound values per row
    for _, post := range unsavedRows {
        valueStrings = append(valueStrings, "(?, ?, ?, ?, ?, ?)")
        valueArgs = append(valueArgs, post.Uuid)
        valueArgs = append(valueArgs, post.Rank_score)
        valueArgs = append(valueArgs, post.Bonus_honor_points)
        valueArgs = append(valueArgs, post.Bonus_coin)
        valueArgs = append(valueArgs, post.Bonus_diamond)
        valueArgs = append(valueArgs, post.Rank)
    }
    cmd_update := fmt.Sprintf(`INSERT INTO pvp_hsw ( uuid, rank_score, bonus_honor_points, bonus_coin, bonus_diamond, week_rank ) VALUES %s ON DUPLICATE KEY UPDATE
        rank_score=VALUES(rank_score),
        bonus_honor_points=VALUES(bonus_honor_points),
        bonus_coin=VALUES(bonus_coin),
        bonus_diamond=VALUES(bonus_diamond),
        week_rank=VALUES(week_rank)`, strings.Join(valueStrings, ","))
    _, err := tx.Exec(cmd_update, valueArgs...)
    if err != nil {
        return err
    }
    log.Printf("pvp_hsw bulk updated %d rows", len(unsavedRows))
    return nil
}
unsavedRows := []PVP_HSW_t{}
for rows.Next() {
    var data PVP_HSW_t
    err := rows.Scan(&data.Rank, &data.Uuid, &data.Rank_score,
        &data.Bonus_honor_points, &data.Bonus_coin, &data.Bonus_diamond)
    if err != nil {
        log.Println(err)
        return
    }
    unsavedRows = append(unsavedRows, data)
}
// insert in batches so a single statement does not grow unbounded
batch_size := 10000
for i := 0; i < len(unsavedRows); i += batch_size {
    open_end := i + batch_size
    if open_end > len(unsavedRows) {
        open_end = len(unsavedRows)
    }
    err := BulkInsert(tx, unsavedRows[i:open_end])
    if err != nil {
        log.Println(err)
        return
    }
}