Monitoring archive generation for every hour





    Amit Bansal
    Keymaster

    You can use the following query to monitor archive log generation on an hourly basis:

    set linesize 140
    set feedback off
    set timing off
    set pagesize 1000
    col ARCHIVED format a8
    col ins    format 99  heading "DB"
    col status format a12
    col archive_date format a20
    col member format a60
    col type   format a10
    col group#  format 99999999
    col min_archive_interval format a20
    col max_archive_interval format a20
    col h00 heading "H00" format  a3
    col h01 heading "H01" format  a3
    col h02 heading "H02" format  a3
    col h03 heading "H03" format  a3
    col h04 heading "H04" format  a3
    col h05 heading "H05" format  a3
    col h06 heading "H06" format  a3
    col h07 heading "H07" format  a3
    col h08 heading "H08" format  a3
    col h09 heading "H09" format  a3
    col h10 heading "H10" format  a3
    col h11 heading "H11" format  a3
    col h12 heading "H12" format  a3
    col h13 heading "H13" format  a3
    col h14 heading "H14" format  a3
    col h15 heading "H15" format  a3
    col h16 heading "H16" format  a3
    col h17 heading "H17" format  a3
    col h18 heading "H18" format  a3
    col h19 heading "H19" format  a3
    col h20 heading "H20" format  a3
    col h21 heading "H21" format  a3
    col h22 heading "H22" format  a3
    col h23 heading "H23" format  a3
    col total format a6
    col date format a10
    
    -- select * from v$logfile order by group#;
    -- select * from v$log order by SEQUENCE#;
    
    -- Quick check: highest archived sequence#, latest completion time and redo block size
    select max(sequence#) last_sequence, max(completion_time) completion_time, max(block_size) block_size from v$archived_log;
    
    SELECT instance ins,
    log_date "DATE" ,
    lpad(to_char(NVL( COUNT( * ) , 0 )),6,' ') Total,
    lpad(to_char(NVL( SUM( decode( log_hour , '00' , 1 ) ) , 0 )),3,' ') h00 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '01' , 1 ) ) , 0 )),3,' ') h01 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '02' , 1 ) ) , 0 )),3,' ') h02 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '03' , 1 ) ) , 0 )),3,' ') h03 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '04' , 1 ) ) , 0 )),3,' ') h04 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '05' , 1 ) ) , 0 )),3,' ') h05 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '06' , 1 ) ) , 0 )),3,' ') h06 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '07' , 1 ) ) , 0 )),3,' ') h07 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '08' , 1 ) ) , 0 )),3,' ') h08 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '09' , 1 ) ) , 0 )),3,' ') h09 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '10' , 1 ) ) , 0 )),3,' ') h10 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '11' , 1 ) ) , 0 )),3,' ') h11 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '12' , 1 ) ) , 0 )),3,' ') h12 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '13' , 1 ) ) , 0 )),3,' ') h13 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '14' , 1 ) ) , 0 )),3,' ') h14 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '15' , 1 ) ) , 0 )),3,' ') h15 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '16' , 1 ) ) , 0 )),3,' ') h16 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '17' , 1 ) ) , 0 )),3,' ') h17 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '18' , 1 ) ) , 0 )),3,' ') h18 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '19' , 1 ) ) , 0 )),3,' ') h19 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '20' , 1 ) ) , 0 )),3,' ') h20 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '21' , 1 ) ) , 0 )),3,' ') h21 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '22' , 1 ) ) , 0 )),3,' ') h22 ,
    lpad(to_char(NVL( SUM( decode( log_hour , '23' , 1 ) ) , 0 )),3,' ') h23
    FROM   (
             SELECT thread# INSTANCE,
                    TO_CHAR( first_time , 'DD-MON-YY' ) log_date,
                    TO_CHAR( first_time , 'hh24' ) log_hour
             FROM   v$log_history
           )
    GROUP  BY INSTANCE, log_date
    ORDER  BY INSTANCE, to_date(log_date,'DD-MON-YY');
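
    The query above counts how many archives were generated in each hour. If you also want to see how much redo (in MB) is archived per hour, v$archived_log exposes BLOCKS and BLOCK_SIZE, so the volume can be derived from their product. The following is a minimal sketch along those lines (not part of the original script); if you archive to multiple destinations you may want to filter on DEST_ID as well.

    -- Sketch: redo archived per hour over the last 7 days, sized as BLOCKS * BLOCK_SIZE
    -- (optionally add "AND dest_id = 1" if multiple archive destinations produce duplicate rows)
    SELECT thread#                                                  ins,
           TO_CHAR(TRUNC(completion_time, 'HH'), 'DD-MON-YY HH24')  log_hour,
           COUNT(*)                                                 archives,
           ROUND(SUM(blocks * block_size) / 1024 / 1024)            mb_generated
    FROM   v$archived_log
    WHERE  completion_time > sysdate - 7
    GROUP  BY thread#, TRUNC(completion_time, 'HH')
    ORDER  BY thread#, TRUNC(completion_time, 'HH');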

    Oracle 11g introduces the PIVOT clause, which lets you write cross-tabulation queries that rotate rows into columns, aggregating data in the process of the rotation. On 11g, the query above can be rewritten as:

    col 00 heading "00" format  999
    col 01 heading "01" format  999
    col 02 heading "02" format  999
    col 03 heading "03" format  999
    col 04 heading "04" format  999
    col 05 heading "05" format  999
    col 06 heading "06" format  999
    col 07 heading "07" format  999
    col 08 heading "08" format  999
    col 09 heading "09" format  999
    col 10 heading "10" format  999
    col 11 heading "11" format  999
    col 12 heading "12" format  999
    col 13 heading "13" format  999
    col 14 heading "14" format  999
    col 15 heading "15" format  999
    col 16 heading "16" format  999
    col 17 heading "17" format  999
    col 18 heading "18" format  999
    col 19 heading "19" format  999
    col 20 heading "20" format  999
    col 21 heading "21" format  999
    col 22 heading "22" format  999
    col 23 heading "23" format  999
    select * from (
      SELECT thread# INSTANCE,
             TO_CHAR( first_time , 'DD-MON-YY' ) log_date,
             TO_CHAR( first_time , 'hh24' ) log_hour,
             count(*) num_arch
      FROM   v$log_history
      WHERE  first_time > sysdate - 7
      GROUP  BY thread#, TO_CHAR( first_time , 'DD-MON-YY' ), TO_CHAR( first_time , 'hh24' )
    )
    pivot (sum(num_arch) for log_hour in (00,01,02,03,04,05,06,07,08,09,10,11,12,13,14,15,16,17,18,19,20,21,22,23))
    order by log_date, instance;
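
    Note that LOG_HOUR in the inline view is a character string ('00' through '23') while the IN list above uses numeric literals, so the match relies on Oracle's implicit conversion. A slightly more explicit variant (a sketch, not from the original post) uses string literals with AS aliases, so the pivoted column names line up exactly with the col definitions above:

    select * from (
      SELECT thread# INSTANCE,
             TO_CHAR( first_time , 'DD-MON-YY' ) log_date,
             TO_CHAR( first_time , 'hh24' ) log_hour,
             count(*) num_arch
      FROM   v$log_history
      WHERE  first_time > sysdate - 7
      GROUP  BY thread#, TO_CHAR( first_time , 'DD-MON-YY' ), TO_CHAR( first_time , 'hh24' )
    )
    pivot (sum(num_arch) for log_hour in
           ('00' as "00", '01' as "01", '02' as "02", '03' as "03",
            '04' as "04", '05' as "05", '06' as "06", '07' as "07",
            '08' as "08", '09' as "09", '10' as "10", '11' as "11",
            '12' as "12", '13' as "13", '14' as "14", '15' as "15",
            '16' as "16", '17' as "17", '18' as "18", '19' as "19",
            '20' as "20", '21' as "21", '22' as "22", '23' as "23"))
    order by log_date, instance;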
