Commit d66ab702 authored by Erik Alexandre Pucci

database: Finalize load DW script

Signed-off-by: Erik Alexandre Pucci <eap08@c3sl.ufpr.br>
parent 6fa61373
@@ -30,14 +30,13 @@ user=$2
password=$3
createdb ${database}
psql -c "create role ${user} login;"
psql -c "alter role ${user} password '${password}';"
psql -c "create role ${user} login; alter role ${user} password '${password}';"
psql -d ${database} -c "create language plpgsql;"
psql -d ${database} -f ${PREFIX}/create_types.sql
psql -d ${database} -f ${PREFIX}/create_staging_areas.sql
psql -d ${database} -f ${PREFIX}/create_data_warehouse.sql
psql -d ${database} -f ${PREFIX}/create_data_marts.sql
psql -d ${database} -f ${PREFIX}/create_indexes.sql
psql -d ${database} -f ${PREFIX}/add_comments.sql
psql -d ${database} -f "${PREFIX}/create_types.sql"
psql -d ${database} -f "${PREFIX}/create_staging_areas.sql"
psql -d ${database} -f "${PREFIX}/create_data_warehouse.sql"
psql -d ${database} -f "${PREFIX}/create_data_marts.sql"
psql -d ${database} -f "${PREFIX}/create_indexes.sql"
psql -d ${database} -f "${PREFIX}/add_comments.sql"
bash ${PREFIX}/grant_permissions.sh ${database} ${user}
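The consolidated role-creation call above will still abort if the role already exists from an earlier run. As an editor's sketch only (placeholder role name and password, not part of this commit), an idempotent variant could be run through psql like this:

do $$
begin
    -- create the role only when it is missing (placeholder name 'dw_user')
    if not exists (select 1 from pg_catalog.pg_roles where rolname = 'dw_user') then
        create role dw_user login;
    end if;
end
$$;
-- setting the password is already safe to repeat
alter role dw_user password 'changeme';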
#!/bin/bash
# Copyright (C) 2004-2010 Centro de Computacao Cientifica e Software Livre
# Copyright (C) 2009-2012 Centro de Computacao Cientifica e Software Livre
# Departamento de Informatica - Universidade Federal do Parana - C3SL/UFPR
#
# This file is part of database
@@ -25,21 +25,21 @@ if test $# -lt 1 -o $# -gt 6; then
fi
database=$1
DUMPDIR='/home/postgres/dump'
#LOADDIR='/home/postgres/novos_dispositivos'
LOGFILE="/home/postgres/log/load.out"
DUMPFILE="backup_$(date +%y_%m_%d).sql"
DUMP_PARAM="-Fc -b"
DUMPDIR="/home/postgres/dump"
LOADDIR="/home/postgres/load"
DUMPFILE="${DUMPDIR}/backup_$(date +%F).sql"
DUMPPARAM="-Fc -b"
cd "${DUMPDIR}"
date +"Backup started at %F %T"
pg_dump ${DUMPPARAM} ${database} > "${DUMPFILE}"
date +"Backup completed at %F %T"
date +"Backup started at %F %T" >> "${LOGFILE}"
pg_dump ${database} ${DUMP_PARAM} > "${DUMPFILE}"
date +"Backup completed at %F %T" >> "${LOGFILE}"
date +"Data warehouse load started at %F %T"
psql -d ${database} -f "${LOADDIR}/load_data_warehouse.sql"
psql -d ${database} -c "select load_data_warehouse();"
date +"Data warehouse load finished at %F %T"
#cd "${LOADDIR}"
date +"Load started at %F %T" >> "${LOGFILE}"
psql -d ${database} -c "select f_load_dw();"
psql -d ${database} -c "select load_mectb13();"
date +"Load finished at %F %T" >> "${LOGFILE}"
date +"Data marts load started at %F %T"
psql -d ${database} -f "${LOADDIR}/load_data_marts.sql"
psql -d ${database} -c "select load_data_marts();"
date +"Data marts load finished at %F %T"
@@ -146,8 +146,15 @@ begin
raise log 'Done';
/* ---------------------------------------------------------------------- */
/* Finally, insert new data into fact_inventory */
/* Get minimum date from which the load must take effect in fact_alert */
select min(contact_date) into min_date from sa_inventory;
select max(contact_date) into tmp from fact_inventory;
if tmp < min_date then
min_date := tmp;
end if;
/* Insert new data into fact_inventory using the temporary tables */
raise log 'Inserting new machines inventories and contacts into '
'"fact_inventory"...';
insert into fact_inventory (load_date, contact_date, machine_id, os_id,
@@ -170,9 +177,6 @@ begin
count_total := count_total + tmp;
raise log 'Done - % rows inserted', tmp;
/* Get minimum date from which the load must take effect in fact_alert */
select min(contact_date) into min_date from sa_inventory;
/* Insert machine modifications data into fact_alert */
raise log 'Inserting alert data from "fact_inventory" to "fact_alert"...';
insert into fact_alert (load_date, old_inventory, inventory, memory_alert,
@@ -189,11 +193,29 @@ begin
) select load_date_id, o.id, n.id, case when n.memory_size <
o.memory_size * 0.9 then true else false end, case when n.hd_size <
o.hd_size * 0.9 then true else false end from tmp_alert o, tmp_alert
n where o.machine_id = n.machine_id and o.row_n = n.row_n - 1;
n where o.machine_id = n.machine_id and o.row_n = n.row_n - 1
except select load_date_id, old_inventory, inventory, memory_alert,
hd_alert from fact_alert;
get diagnostics tmp = ROW_COUNT;
count_total := count_total + tmp;
raise log 'Done - % rows inserted', tmp;
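/* Editor's sketch, not part of this commit: the insert above pairs each
   inventory in the temporary table tmp_alert with the next one for the same
   machine (o.row_n = n.row_n - 1) and flags an alert when memory or disk
   shrinks by more than 10% (the 0.9 factor); the trailing "except" keeps
   already-recorded alerts out of fact_alert. Run standalone against
   tmp_alert, the same pairing looks like: */
select o.machine_id,
       n.memory_size < o.memory_size * 0.9 as memory_alert,
       n.hd_size < o.hd_size * 0.9 as hd_alert
  from tmp_alert o
  join tmp_alert n on n.machine_id = o.machine_id
                  and n.row_n = o.row_n + 1;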
raise log 'Updating existing rows with new network usage data in '
'"fact_net_usage"...';
update fact_net_usage f set load_date = load_date_id, down_bytes = sdb,
down_packages = sdp, up_bytes = sub, up_packages = sup
from (select t.id, t.collect_time, sum(down_bytes) as sdb,
sum(down_packages) as sdp, sum(up_bytes) as sub, sum(up_packages) as
sup from (select id, collect_time from sa_net_usage n, dim_school s
where n.inep = s.inep
intersect select school_id, collect_time from fact_net_usage) t,
sa_net_usage n, dim_school s where n.inep = s.inep and t.id = s.id
and t.collect_time = n.collect_time group by t.id, t.collect_time) u
where school_id = u.id and f.collect_time = u.collect_time;
get diagnostics tmp = ROW_COUNT;
count_total := count_total + tmp;
raise log 'Done - % rows updated', tmp;
raise log 'Inserting new network usage data into "fact_net_usage"...';
insert into fact_net_usage (load_date, school_id, collect_time, down_bytes,
down_packages, up_bytes, up_packages)
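The network-usage block above follows an update-then-insert pattern: rows whose (school_id, collect_time) key already exists in fact_net_usage get their aggregated counters refreshed from the staging area, and only the remaining staging rows are appended. A minimal standalone sketch of the same pattern (reduced to the byte counters, with load_date and the package columns omitted and an anti-join in place of the intersect subquery; not the commit's exact statements):

update fact_net_usage f
   set down_bytes = u.down_bytes,
       up_bytes   = u.up_bytes
  from (select s.id as school_id, n.collect_time,
               sum(n.down_bytes) as down_bytes,
               sum(n.up_bytes)   as up_bytes
          from sa_net_usage n
          join dim_school s on s.inep = n.inep
         group by s.id, n.collect_time) u
 where f.school_id = u.school_id
   and f.collect_time = u.collect_time;

insert into fact_net_usage (school_id, collect_time, down_bytes, up_bytes)
select s.id, n.collect_time, sum(n.down_bytes), sum(n.up_bytes)
  from sa_net_usage n
  join dim_school s on s.inep = n.inep
 where not exists (select 1
                     from fact_net_usage f
                    where f.school_id = s.id
                      and f.collect_time = n.collect_time)
 group by s.id, n.collect_time;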
@@ -36,6 +36,6 @@ pg_dump -Fc -b -t mectb* ${old_database} | pg_restore -d ${new_database}
date +"Dump-restore process completed at %F %T"
date +"Migration process started at %F %T"
psql -d ${new_database} -f ${PREFIX}/migrate_data.sql
psql -d ${new_database} -c 'select migrate_data()'
psql -d ${new_database} -f "${PREFIX}/migrate_data.sql"
psql -d ${new_database} -c "select migrate_data()"
date +"Migration process completed at %F %T"