DBA Data

ODM.ODM_ABN_MODEL dependencies on NUM_LIST_TYPE
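The listing below reproduces the stored source lines of ODM.ODM_ABN_MODEL in which NUM_LIST_TYPE is referenced. The package body appears to be stored wrapped, which is why each source line shows up as a stream of tokenized 1...: entries rather than readable PL/SQL. As a rough guide only, here is a minimal sketch of how a listing like this can be regenerated from the data dictionary, assuming SELECT access to DBA_DEPENDENCIES and DBA_SOURCE; the owner, object, and referenced names are taken from this page's title, and everything else is standard dictionary columns.

-- Confirm the dependency recorded in the data dictionary.
SELECT owner, name, type, referenced_owner, referenced_name, referenced_type
  FROM dba_dependencies
 WHERE owner = 'ODM'
   AND name = 'ODM_ABN_MODEL'
   AND referenced_name = 'NUM_LIST_TYPE';

-- List the stored source lines of the package that mention the referenced type.
SELECT type, line, text
  FROM dba_source
 WHERE owner = 'ODM'
   AND name = 'ODM_ABN_MODEL'
   AND type IN ('PACKAGE', 'PACKAGE BODY')
   AND UPPER(text) LIKE '%NUM_LIST_TYPE%'
 ORDER BY type, line;

Because the body is wrapped, DBA_SOURCE returns the wrapped text, so the matches fall inside the token stream reproduced below.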


Line 2: NT_PREDS_CREATED:
1P_RULES_SUBSET2D_TABLE_NAME:
1P_RULES_SUBSET2D_TABLE_CREATED:
1P_TRAIN_SUBSET2D_TABLE_NAME:
1P_TRAIN_SUBSET2D_TABLE_CREATED:
1P_CURRENT_LOG_P_TABLE_NAME:
1P_CURRENT_LOG_P_TABLE_CREATED:
1P_MODEL_LOG_P_TABLE_NAME:
1P_MODEL_LOG_P_TABLE_CREATED:
1P_SC_ACTUALS_TABLE_NAME:
1P_SC_ACTUALS_TABLE_CREATED:
1P_SC_SCORES_TABLE_NAME:
1P_SC_SCORES_TABLE_CREATED:
1P_SC_BEST_GUESS_TABLE_NAME:
1P_SC_BEST_GUESS_CREATED:
1P_SC_CONFUSION_TABLE_NAME:
1P_SC_CONFUSION_CREATED:
1P_SC_DATA_TRANS_TABLE_NAME:
1P_SC_DATA_TRANS_TABLE_CREATED:
1P_SC_MODEL_COST_TABLE_NAME:
1P_SC_MODEL_COST_TABLE_CREATED:
1TEMP_SEQUENCES:
1P_BLOCKS_SEQUENCE_NAME:
1P_BLOCKS_SEQUENCE_CREATED:
1P_TREE_NUM_SEQUENCE_NAME:
1P_TREE_NUM_SEQUENCE_CREATED:
1P_NODE_NUM_SEQUENCE_NAME:
1P_NODE_NUM_SEQUENCE_CREATED:
1MODEL_COST_REC:
1MODEL_DESCRIPTION:
1DATA_TRANS_COST:
1TREE_BITS:
1PRED_BITS:
1CNT_BITS:
1BASELINE_DESCRIPTION:
1SIGNIFICANT_PREDS_REC:
1COMPRESSION:
1PARENT_ENTROPY:
1PARENT_CNT_BITS:
1CHILD_ENTROPY:
1ATTR_REC:
1ATTRIBUTE_NAME:
1STRING_VALUE:
14000:
1NUM_VALUE:
1ATTR_TABLE:
1TREES_REC:
1DEPTH_OF_TREE:
1PREDICTOR:
1ACTIVE:
1TREES_TABLE:
1FUNCTION:
1COMPUTE_LOG_COMBINATIONS:
1N:
1M:
1RETURN:
1V_RESULT:
1I:
1<:
1=:
1LOG:
12.0:
1LOOP:
1+:
1-:
1PARSE_AND_ADD:
1P_TERMS:
1V_SUM:
1V_POS:
1V_START:
1V_END:
1LENGTH:
1V_LEN:
1V_OCCURRENCE:
1P_DEBUG_ENABLED:
1TRUE:
1DEBUG_DUMP:
1INSTR:
1/:
1WHILE:
1>:
1v_start:: :
1||:
1 v_end:: :
1 v_len:: :
1 v_occurrence:: :
1 current term:: :
1SUBSTR:
1TO_NUMBER:
1 v_sum:: :
1GET_COLUMN_LIST:
1OUT:
1P_SQL_STMT:
1V_CUR:
1V_COL_CNT:
1V_COL_TAB:
1DBMS_SQL:
1DESC_TAB:
1V_COLUMN_LIST:
1C_SUBPROGRAM_NAME:
1Enter :
1.:
1OPEN_CURSOR:
1PARSE:
1NATIVE:
1DESCRIBE_COLUMNS:
1CLOSE_CURSOR:
1IS NOT NULL:
1EXTEND:
1COL_NAME:
1Leave :
1OTHERS:
1ROLLBACK:
1ROLLBACK_NR:
1COMMIT:
1Error :
1SQLERRM:
1RAISE:
1ADD_TO_SELECT_LIST:
1P_SELECT_LIST:
1P_EXPRESSION:
1P_ALIAS:
1,:
1 :
1TARGET_BINNED:
1P_BIN_CAT_TABLE_NAME:
1P_TARGET_ATTRIBUTE_NAME:
1V_SQL_STMT:
1V_BIN_CNT:
1V_TARGET_BINNED:
1param p_bin_cat_table_name = :
1param p_target_attribute_name = :
1SELECT COUNT(*) :
1FROM (SELECT * :
1FROM %bin_cat_table% :
1WHERE column_name = ::target_attr) :
1WHERE rownum <= 1):
1REPLACE:
1%bin_cat_table%:
1Bind variable target_attr = :
1EXECUTE:
1IMMEDIATE:
1USING:
1Done:
1THROW_EXCEPTION:
1P_LOCATION:
1P_EXCEPTION_NUM:
1P_SUBSTITUTE_1:
1P_SUBSTITUTE_2:
1P_SUBSTITUTE_3:
1P_SUBSTITUTE_4:
1V_MESSAGE_TEXT:
1MESSAGE_TEXT_TYPE:
1V_ERROR_INSTANCE_ID:
1C_SESSION_USER:
1SYS_CONTEXT:
1userenv:
1session_user:
1C_SESSION_DETAILS:
116:
1INSTANCE:
1:::
1SESSIONID:
1FORM_SUBSTITUTED_STRING:
1DMT_OPUTIL:
1GETERRORINSTANCEID:
1RECORDDIAGNOSTICMESSAGE:
1ERROR:
1CRITICAL:
1ODM_AR_BUILD_SQL:
1ODM_SERVER_SQL:
1RAISE_APPLICATION_ERROR:
1GET_ELAPSED_MINUTES:
1START_TIME:
1END_TIME:
1ELAPSED_MINUTES:
1CEIL:
160000.0:
1INIT_TABLES_AND_SEQUENCES:
1P_TAB:
1P_SEQ:
1P_MINING_TASK_ID:
1UNIQUE_TABLE_NAME:
1UNIQUE_SEQUENCE_NAME:
1DESTROY_TABLES_AND_SEQUENCES:
1dropping sequences:
1DROP_SEQUENCE:
1done dropping sequences:
1dropping intermediate tables:
1DROP_TABLE:
1done dropping intermediate tables:
1COUNT_AVAILABLE_PREDS:
1P_NUM_AVAILABLE_PREDS:
1getting count of available predictors:
1select count(*) cnt from (select distinct :
1 from :
1 where :
1 NOT IN (select /*+HASH_AJ*/ :
1)):
1got count of available predictors:: :
1SELECT_FEATURES:
1P_COMPLEXITY_PRED_LIMIT:
1V_SP_TABLE:
1V_ROOT_TABLE:
1V_INIT_HIST:
1V_INIT_PROB:
1V_TABLE_CREATED:
1V_MAX_TREES_KEPT:
1remove excess predictors from significant_preds table:
1CREATE TABLE :
1 PARALLEL NOLOGGING AS :
1select /*+PARALLEL(h)*/ :
1, COMPRESSION, COMPRESSION_RANK, TREE_NUM, PARENT_ENTROPY, PARENT_CNT_BITS, :
1CHILD_ENTROPY, CNT_BITS, 1 ACTIVE FROM :
1(select /*+PARALLEL(j)*/ * from :
1(select /*+PARALLEL(i)*/ :
1, COMPRESSION, TREE_NUM, PARENT_ENTROPY, PARENT_CNT_BITS, :
1CHILD_ENTROPY, CNT_BITS, RANK() OVER (ORDER BY COMPRESSION DESC) COMPRESSION_RANK from :
1 i ) j :
1 WHERE j.COMPRESSION_RANK <= :
1) h :
1DONE removing excess predictors f
Line 3: rom significant_preds table:: :
1set ACTIVE to -1 for predictors not used:
1UPDATE :
1 SET ACTIVE = -1 WHERE COMPRESSION_RANK > :
1DONE setting ACTIVE to -1 for predictors not used:
1remove excess predictors from p_root_entropy_table_name table:
1SELECT /*+PARALLEL(h)*/ :
1, NUM_VALUES, LOG2_NUM_VALUES, ENTROPY, CNT FROM :
1(SELECT /*+PARALLEL(r) USE_HASH(s r)*/ r.:
1, r.NUM_VALUES, r.LOG2_NUM_VALUES, r.ENTROPY, r.CNT FROM :
1 r, :
1(SELECT :
1) s :
1WHERE r.:
1 = s.:
1UNION :
1SELECT :
1 r :
1 = :
1':
1 UNION :
1SELECT s.:
1DONE with removing excess predictors from p_root_entropy_table_name:: :
1remove excess predictors from p_initial_probs_table_name table:
1SELECT /*+PARALLEL(i) USE_HASH(s i)*/ i.:
1, i.:
1, i.P, i.CNT FROM :
1 i, :
1 WHERE i.:
1SELECT i.:
1 i :
1WHERE i.:
1DONE removing excess predictors from p_initial_probs_table_name table:: :
1CREATE_BLOCK_CNTS_TABLE:
1P_BLOCKS_IDMAP_TABLE_NAME:
1V_MULT_CLAUSE:
1V_FROM_CLAUSE:
1V_WHERE_CLAUSE:
1 WHERE pc.:
1 = sc.ATTRIBUTE_NAME :
1AND pc.:
1 = sc.:
1V_WHERE_CLAUSE1:
1 * w.weight :
1, :
1 w :
1 AND w.target_value = pc.target_value :
1creating block_cnts table:
1create table :
1 NOLOGGING PARALLEL as :
1select /*+ PARALLEL(sc) ORDERED USE_HASH(sc pc) */ :
1pc.:
1, pc.:
1, pc.target_value, pc.block_num, :
1 pc.cnt :
1cnt, (pc.cnt + sc.p) :
1 adj_cnt from :
1 sc, :
1(select /*+ PARALLEL(p) PARALLEL(c) ORDERED USE_HASH(p c)*/ :
1, target_value, block_num, count(*) cnt :
1from :
1 p, :
1 c :
1where p.:
1 = c.:
1 group by :
1, target_value, block_num :
1) pc:
1 AND w.target_value = p.target_value :
1 AND w.target_value = b.target_value :
1insert into the empty conditional probability cells:
1insert into /*+APPEND*/ :
1(select /*+PARALLEL(i)*/ b.:
1, p.target_value, b.block_num, :
1 0 cnt, i.p :
1 adj_cnt :
1(select distinct block_num, :
1) b, :
1 p :
1where i.:
1 = b.:
1 MINUS :
1select /*+PARALLEL(i)*/ b.:
1, b.target_value, b.block_num, 0 cnt, i.p :
1 b :
1 AND :
1i.:
1):
1done inserting into the empty conditional probability cells:
1ANALYZE_TABLE:
1C_AT_ESTIMATE:
1done creating block_cnts table:: :
1CREATE_BLOCK_PROBS_TABLE:
1V_ANTECEDENT_P_CLAUSE:
1V_SQL_WHERE:
1 WHERE a.target_value = s1.target_value :
1IS NULL:
1(SELECT target_value, log2_p from :
1) a :
1(SELECT block_num, target_value, loglikelihood log2_p from :
1AND a.block_num = s1.block_num:
1creating block_probs table:
1select /*+ PARALLEL(s3)*/ block_num, target_value, loglikelihood, :
1 , cnt, adj_cnt, p, log2_p, target_p, log(2.0, target_p) log2_target_p FROM :
1 (select /*+ PARALLEL(s2)*/ block_num, target_value, log(2.0, target_likelihood) loglikelihood, :
1, cnt, adj_cnt, p, log2_p, :
1      target_likelihood / sum(target_likelihood) OVER(PARTITION BY block_num, :
1) target_p FROM :
1 (select /*+ PARALLEL(s1) USE_HASH(a s1) PQ_DISTRIBUTE(s1 NONE, BROADCAST)*/ s1.block_num, s1.target_value, :
1    , cnt, adj_cnt, p, s1.log2_p, power(2.0, s1.log2_p + a.log2_p) target_likelihood from :
1 (select /*+ PARALLEL(s)*/ block_num, target_value, :
1 , cnt, adj_cnt, p, log(2.0 ,p) log2_p from :
1 (select /*+ PARALLEL(cnts)*/ block_num, target_value, :
1    , cnt, adj_cnt, adj_cnt/sum(adj_cnt) OVER(PARTITION BY block_num, target_value, :
1) p :
1 from :
1 cnts :
1 where :
1 NOT IN :
1 (select /*+HASH_AJ*/ :
1 from :
1)) s ) s1, :
1) s2 ) s3:
1done creating block_probs table:: :
1CREATE_ENTROPY_TABLE:
1P_ENTROPY_TABLE_NAME:
1P_ENTROPY_TABLE_CREATED:
1creating entropy table:
1 NOLOGGING as :
1select block_num, :
1-sum(target_p * log2_target_p) * sum(cnt) total_entropy, -sum(target_p * log2_target_p) entropy, :
1sum(cnt) cnt :
1 group by block_num, :
1done creating entropy table:: :
1GETBESTSPLITTER:
1P_BEST_SPLITTER:
1getting best splitter:
1select :
1 where compression = :
1(select max(compression) from :
1) :
1AND rownum = 1:
1got best splitter:: :
1EXCLUDEBESTSPLITTER:
1inserting best splitter into exclusions table:
1insert into :
1 valu
Line 4: es(:
1done inserting best splitter into exclusions table:
1INIT_BLOCK_NUMS_FOR_NEW_SEED:
1P_CURRENT_TREE:
1P_STARTING_BLOCK:
1V_DEPTH_OF_TREE:
1V_CURRENT_PREDICTOR:
1creating block_nums table:
1 block_num from dual:
1done creating block_nums table:::
1creating block_ table:
1 target_value, block_num :
1(select block_num from :
1 = :
1done creating block_ table:: :
1creating blocks table:
1select /*+ PARALLEL(c) USE_HASH(p c)*/ :
1from (select /*+ PARALLEL(t) */ * from :
1 t :
1) c, :
1where p.target_value = c.:
1done creating blocks table:: :
1select tree_num, predictor, predictor_value, target_value, block_num, parent_block_num :
1 where tree_num = :
1getting predictor associated with next feature tree num:
1select distinct predictor from :
1got predictor associated with next feature tree num:: :
1initializing block_cnts table for next iteration:
1done initializing block_cnts table for next iteration:
1PRUNE_INITIAL_BLOCKS:
1using MDL to determine which blocks to keep:
1delete from :
1 where block_num IN :
1(select distinct bp.block_num from :
1(select block_num, blkcnt/totcnt p_blk, -log(2.0, blkcnt/totcnt ) log2p_blk from :
1(select sum(cnt) totcnt from :
1 where :
1) rw, :
1(select block_num, sum(cnt) blkcnt from :
1 group by block_num) blks) bp, :
1(select ct.block_num, parent_tran_cost - child_tran_cost benefit from :
1(select c.block_num, -sum(p.log2_p * c.cnt) parent_tran_cost from :
1(select * from :
1 where target_p > 0 and cnt > 0) c, :
1WHERE c.target_value = p.target_value group by c.block_num) pt,:
1(select block_num, -sum(log(2.0, target_p) * cnt) child_tran_cost, sum(cnt) cnt from :
1 where target_p > 0 and cnt > 0) :
1 group by block_num) ct :
1where ct.block_num = pt.block_num) ben :
1where log2p_blk > benefit AND bp.block_num = ben.block_num):
1done using MDL to determine which blocks to keep:
1PRUNE_BLOCKS:
1P_CURRENT_DEPTH:
1 depth_of_tree = :
1(select c.block_num, -sum(log(2.0, p.target_p) * c.cnt) parent_tran_cost from :
1 where target_p > 0 and cnt > 0) p :
1WHERE c.tree_num = :
1 c.depth_of_tree = :
1 AND c.parent_block_num = p.block_num AND :
1 c.target_value = p.target_value group by c.block_num) pt,:
1CREATE_INTERNAL_PRIORS_TABLE:
1V_CREATED:
1creating internal copy of priors table:
1 ATTRIBUTE_NAME, target_value, p, log(2, p) log2_p :
1FROM :
1C_AT_COMPUTE:
1done creating internal copy of priors table:
1dropping current priors table and making a fixed schema copy of the internal priors:
1 AS SELECT * from :
1DONE dropping current priors table and making a fixed schema copy of the internal priors:
1CREATE_SEED_FEATURE_TREES:
1P_TREES_TABLE_CREATED:
1P_NUM_TARGET_VALUES:
1V_SQL_OUTPUT:
1V_BEST_SPLITTER:
1creating exclusions table:
1 and rownum < 2:
1done creating exclusions table:::
1creating initial_probs table:
1select /*+ PARALLEL(i)*/ :
1, cnt/sum(cnt) OVER (PARTITION BY :
1) p, cnt from :
1(select /*+PARALLEL(t)*/ :
1, count(*) cnt :
1) i:
1done creating initial probs table:::
1deleting single-value predictors from :
1 IN :
1(select distinct :
1, count(*) cnt from :
1where cnt = 1):
1done deleting single-value predictors from :
1TABLE_EXIST:
1creating priors table:
1p_target_name:: :
1select target_attribute, target_value, p, cnt, adj_cnt, log(2, p) log2_p from :
1 (select :
1 target_attribute, :
1 target_value, (cnt + 1) / sum(cnt + 1) over(partition BY :
1) p, cnt, cnt + 1 adj_cnt :
1 = :
1done creating priors table:::
1making a copy of priors for internal use:
1DONE making a copy of priors for internal use:
1creating prior weights table:
1select p.target_value, (tot_cnt * p.p / cnt) weight from :
1(select target_value, p from :
1) p, :
1(select :
1 target_value, cnt from :
1) t, :
1(select sum(cnt) tot_cnt from :
1) sums :
1where p.target_value = t.target_value:
1done creating prior weights table:::
1alter the priors table to add a cnt and WEIGHT_OF_EVIDENCE column:
1ALTER TABLE :
1 ADD (CNT NUMBER, WEIGHT_OF_EVIDENCE NUMBER):
1DONE alter the priors table to
Line 5: add a cnt and WEIGHT_OF_EVIDENCE column:
1add the cnts to the priors table:
1 p set CNT = :
1(select cnt from :
1 and i.value = p.target_value):
1DONE add the cnts and WEIGHT_OF_EVIDENCE to the priors table:
1add the WEIGHT_OF_EVIDENCE to the priors table:
1 p set WEIGHT_OF_EVIDENCE = :
1(select WEIGHT_OF_EVIDENCE from :
1(select target_value, log(2, p) - log(2, 1.0/count(*) over()) WEIGHT_OF_EVIDENCE from :
1) p1 where p.target_value = p1.target_value):
1DONE add the WEIGHT_OF_EVIDENCE to the priors table:
1computing number of target values:
1select count(*) cnt from :
1computed number of target values:: :
1E_TOO_FEW_TARGETS_NUM:
1creating root_entropy table:
1select /*+ PARALLEL(a)*/ :
1 , count(*) num_values, log(2,count(*)) log2_num_values, :
1 sum(-p*log(2,p)) entropy, sum(cnt) cnt from :
1 a :
1 group by :
1done creating root_entropy table:::
1updating root entropy table:
1update :
1 r SET ENTROPY = :
1(SELECT sum( -p * log(2.0, p)) entropy from :
1DONE updating root entropy table:
1select /*+PARALLEL(c) USE_HASH(p c)*/ :
1creating significant_preds table:
1select /*+ PARALLEL(cm)*/ :
1, compression, :
1.nextval tree_num, :
1parent_entropy, parent_cnt_bits, child_entropy, cnt_bits from :
1(select /*+ PARALLEL(sc)*/ :
1(select /*+ PARALLEL(e) PARALLEL(r) USE_HASH(r e)*/ e.:
1e.cnt * i.parent_entropy + odm_ABN_model.compute_log_combinations(e.cnt + :
1 - 1,:
1 - 1) - :
1 (child_entropy + :
1e.cnt_bits :
1) compression, i.parent_entropy * e.cnt parent_entropy, :
1odm_ABN_model.compute_log_combinations(e.cnt + :
1 - 1) parent_cnt_bits, :
1child_entropy, :
1(select /*+ PARALLEL(ce)*/ :
1, sum(total_entropy) child_entropy, sum(cnt) cnt,:
1 sum(odm_ABN_model.compute_log_combinations(cnt + :
1 - 1)) cnt_bits, count(*) num_values :
1 ce where cnt >= 1 :
1 group by :
1) e, :
1(select sum(-p * log(2.0, p)) parent_entropy from :
1 ) i :
1where e.:
1 = r.:
1) sc order by compression desc) cm:
1done creating significant_preds table:: :
1select tree_num, :
1, compression, parent_entropy, parent_cnt_bits, child_entropy, cnt_bits :
1tree_num:: :
1 p_item_column_name:: :
1 compression:: :
1 parent_entropy:: :
1 parent_cnt_bits:: :
1 child_entropy:: :
1 cnt_bits:: :
1CREATE_INITIAL_BLOCKS:
1V_NUM_TREES:
1V_TREES_TABLE:
1dropping block_nums table prior to recreating now that this level is done:
1.nextval block_num, :
1parent_block_num from :
1 (select tree_num, :
1, compression, parent_block_num from :
1 (select tree_num, i.:
1, s.compression, :
1 p.parent_block_num :
1 from :
1 s, :
1 (select distinct block_num parent_block_num from :
1 where i.:
1 = s.:
1 order by s.compression desc) :
1 order by tree_num, :
1, compression, parent_block_num):
1done creating block_nums table:: :
1creating block_numbers table:
1 depth_of_tree, target_value, :
1 predictor, :
1 predictor_value, :
1block_num, parent_block_num, compression :
1(select target_value from :
1done creating block_numbers table:: :
1creating trees table:
1 (TREE_NUM NUMBER, DEPTH_OF_TREE NUMBER, BLOCK_NUM NUMBER,:
1 PARENT_BLOCK_NUM NUMBER, TARGET_VALUE NUMBER,:
1 TARGET_VALUE_STRING VARCHAR2(4000), PREDICTOR VARCHAR2(30),:
1 PREDICTOR_VALUE NUMBER,:
1 PREDICTOR_VALUE_STRING VARCHAR2(4000), :
1 P NUMBER, LOG2_P NUMBER, CNT NUMBER, :
1 TARGET_P NUMBER, LOGLIKELIHOOD NUMBER, COMPRESSION NUMBER, WEIGHT_OF_EVIDENCE NUMBER, NODE_P NUMBER, ACTIVE NUMBER):
1INSERT INTO :
1 select tree_num, depth_of_tree, b.block_num, b.parent_block_num, :
1p.target_value, TO_CHAR(p.target_value, :
1TM9:
1) target_value_string, :
1predictor, predictor_value, :
1TO_CHAR(predictor_value,:
1) predictor_value_string, :
1 p.p, p.log2_p, p.cnt, target_p, p.LOGLIKELIHOOD, b.compression, log(2, target_p) - pr.log2_p WEIGHT_OF_EVIDENCE, :
1target_p node_p, 1 active from :
1 b, :
1 pr :
1 = b.predictor AND :
1p.:
1 = b.predictor_value AND :
1p.block_num = b.parent_block_num AND :
1p.target_value = b.target_value AND p.target_va
Line 6: lue = pr.target_value:
1done creating trees table:: :
1getting current state of p_trees_table_name:: :
1select distinct tree_num, depth_of_tree, predictor, active from :
1BULK:
1COLLECT:
1DONE getting current state of p_trees_table_name:: :
1COUNT:
1 DEPTH_OF_tree:: :
1 PREDICTOR:: :
1 ACTIVE:: :
1DO_NEXT_ITERATION:
1P_TERMINATE:
1P_ABN_COMPLEXITY_BIAS:
1get previous best predictor:
1 AND depth_of_tree = :
1 - 1:
1got previous best predictor:: :
1creating anc_block_probs table:
1select * from :
1done creating anc_block_probs table:: :
1creating parent_entropy table:
1done creating parent_entropy table:::
1, pn.tree_num, pn.block_num, p.target_value :
1 c , :
1) pn, :
1 where p.:
1p.BLOCK_NUM = pn.PARENT_BLOCK_NUM AND :
1pn.predictor = c.:
1pn.predictor_value = c.:
1pn.target_value = p.target_value:
1 tree_num, :
1(select e.:
1e.cnt * p.parent_entropy + odm_ABN_model.compute_log_combinations(e.cnt + :
1) compression, :
1parent_entropy * e.cnt parent_entropy, :
1 child_entropy, :
1e.cnt_bits:
1, sum(total_entropy) child_entropy, sum(cnt) cnt,:
1 where cnt > 1 :
1, sum(total_entropy)/sum(cnt) parent_entropy :
1 ) p ) :
1 order by compression desc :
1 where compression < :
1 OR :
1 != :
1TABLE_EMPTY:
1.nextval block_num, parent_block_num :
1, parent_block_num :
1 from :
1 (select /*+PARALLEL(t) USE HASH(b t)*/:
1 distinct block_num parent_block_num, t.:
1 t.:
1 (select /*+PARALLEL(t1)*/ * from :
1 t1 :
1 where :
1) t :
1 where b.tree_num = :
1 b.:
1 = t.:
1 ) :
1, parent_block_num ):
1 depth_of_tree, target_value, p.:
1, compression from :
1) s, :
1(select target_value from :
1WHERE p.:
1inserting into trees table:
1done inserting into trees table:: :
1making best splitter inactive as a seed trees:
1 set active = 0 :
1 where DEPTH_OF_TREE=1 AND PREDICTOR = :
1done making best splitter inactive as a seed trees:
1SC_FLATTEN_RULES:
1P_RULES2D_TABLE_NAME:
1P_RULES2D_CREATED:
1P_MAX_DEPTH:
1V_CURRENT_DEPTH:
1creating p_rules2d_table_name:: :
1with d1 as (select block_num, target_value, predictor_value pred_1, log2_p, target_p from :
1 AND depth_of_tree = 1) :
1, d:
1 as (select t.block_num, t.parent_block_num, t.target_value, :
1pred_:
1t.predictor_value pred_:
1, t.log2_p, t.target_p :
1 t, d:
1TO_CHAR:
1 where t.depth_of_tree = :
1 AND t.parent_block_num = d:
1.block_num :
1AND t.target_value = d:
1.target_value) :
1select block_num, d1.pred_1, :
1NULL pred_:
1target_value, log2_p, target_p from d1:
1 UNION ALL select block_num, :
1d:
1.pred_:
1 target_value, log2_p, target_p from d:
1DONE creating p_rules2d_table_name:: :
1SC_GET_TREE_LIST:
1P_TREE_LIST:
1NUM_LIST_TYPE:
1P_MAX_DEPTH_LIST:
1getting list of trees and corresponding max depths from rules table:
1select tree_num, max(depth_of_tree) from :
1 where tree_num <= :
1 group by tree_num:
1done getting list of trees and corresponding max depths from rules table:
1V_SQL_LSTMT:
1V_SQL_COUNT:
1V_CURSOR:
1V_ROWS:
1J:
1V_CURRENT_TREE:
1 NOLOGGING as with :
1CHOP_UP:
1d1_:
1 as :
1(select block_num, target_value, predictor_value pred_1_:
1, log2_p, target_p :
1_:
1t.parent_block_num = d:
1.block_num AND :
1t.target_value = d:
1 UNION ALL select :
1.pred_1_:
1target_value, log2_p, target_p, block_num from d1:
1 target_value, log2_p, target_p, block_num from d:
1SC_CREATE_2D_QUERY:
1P_PREFIX_SQL_STMT:
1P_SQL_LSTMT:
1P_SQL_COUNT:
1P_ATTR_TAB:
1P_TREE_NUMS:
1P_DEPTHS:
1P_PARTITION:
1In sc_create_2d_query:
1max(PRED_:
1PRED_:
1EXIT:
1, max(PRED_:
1 PRED_:
1 from (select /*+ PARALLEL(t)*/ :
1, CASE WHEN t.:
1 THEN t.VALUE END PRED_:
1SC_CREATE_2D_SUFFIX:
1P_ATTR_INDEX:
1)) t) m group by :
1SC_CREATE_2D_SUBSET:
1P_DATA_SUBSET2D_TABLE_NAME:
1P_DATA_SUBSET2D_TABLE_CREATED:
1V_CREATE_TABLE_LSTMT:
1V_INSERT_TABLE_LSTMT:
1V_IGNORE:
1V_PARTITION_LIST:
1V_PARTITION:
172:
1V_ATTR:
1V_TREE_NUM:
1V_DEPTH:
1V_ATTR_TAB:
1V_TREE_NUMS:
1V_DEPTHS:
1V_NUM_PARTITIONS:
1In sc_create_2d_subset:
1Getting partition list for p_data_table_name:
1GET_PA
Line 7: RTITION_LIST:
1DONE Getting partition list for p_data_table_name:: :
1:: :
1constructing partition 1 element list from p_data_table_name:: :
1 AND p_partition:: :
1DONE constructing partition 1 element list from p_data_table_name:: :
1Getting attribute, tree and tree depths lists for the apply:
1select distinct ATTRIBUTE_NAME, TREE_NUM, DEPTH_OF_TREE from (SELECT PREDICTOR ATTRIBUTE_NAME, :
1DENSE_RANK() OVER (ORDER BY TREE_NUM) "TREE_NUM", DEPTH_OF_TREE :
1 where tree_num IN ( :
1)) order by tree_num, DEPTH_OF_TREE:
1DEFINE_COLUMN:
1100:
1FETCH_ROWS:
1COLUMN_VALUE:
1MOD:
1DONE Getting attribute, tree and tree depths lists for the apply:
1creating p_data_subset2d_table_name:: :
1PARTITION BY HASH (:
1PARTITIONS :
1 PARALLEL NOLOGGING as select /*+ PARALLEL(m) NO_MERGE*/ :
1 FROM (select /*+ PARALLEL(tf)*/ * FROM :
1 tf where :
1 IN (:
1DONE creating p_data_subset2d_table_name:: :
1alter session enable parallel dml:
1inserting into :
1 from partition:: :
1INSERT /*+ PARALLEL(:
1) */ INTO :
1 select /*+ PARALLEL(m) NO_MERGE*/ :
1DONE inserting into :
1DONE p_data_subset2d_table_name:: :
1SC_CREATE_FEATURE_LOG_P:
1P_FEATURE_LOG_P_TABLE_NAME:
1P_FEATURE_LOG_P_TABLE_CREATED:
1V_NULL_DEPTH:
1V_SQL_SUFFIX:
1V_SQL_STEM:
1V_SQL_PREFIX:
1In sc_create_feature_log_p:
1Getting partition list for p_data_subset2d_table_name:
1DONE Getting partition list for p_data_subset2d_table_name:: :
1creating p_feature_log_p_table_name:: :
1PARALLEL NOLOGGING as :
1select /*+ PARALLEL(ma)*/ :
1, target_value, sum(log2_p) log2_p from :
1(select /*+ PARALLEL(m) USE_HASH(fr m)*/ :
1, fr.target_value, fr.log2_p :
1 fr, :
1 m :
1 (:
1 fr.pred_:
1 = m.pred_:
1<=:
1 AND fr.pred_:
1 IS NULL) OR :
1) ma group by :
1, target_value:
1DONE creating p_feature_log_p_table_name:: :
1altering session:
1DONE altering session:
1 select /*+ PARALLEL(ma)*/ :
1DONE p_feature_log_p_table_name:: :
1LEAVING sc_create_feature_log_p:
1SC_CREATE_FEATURE_LOG_SUFFIX:
1V_MAX_DEPTH:
1In sc_create_feature_log_p multi-tree:
1LEAVING sc_create_feature_log_p multi-tree:
1SC_CREATE_SCORES:
1P_BASE_LOG_P_TABLE_NAME:
1V_SQL_UNION_CLAUSE:
1V_SQL_TABLE:
1V_SQL_TABLE2:
1V_BASE_PARTITION_LIST:
1V_DATA_PARTITION_LIST:
1In sc_create_scores - Depth > 1:
1Getting partition list for p_feature_log_p_table_name:
1DONE Getting partition list for p_feature_log_p_table_name:: :
1Getting partition list for p_base_log_p_table_name:
1DONE Getting partition list for p_base_log_p_table_name:: :
1creating p_sc_scores_table_name:: :
1 d :
1 UNION ALL select /*+PARALLEL(d)*/ d.:
1, p.target_value, p.log2_p from :
1(select /*+PARALLEL(d1)*/ distinct :
1 d1) d, :
1 p:
1 UNION ALL select /*+PARALLEL(e)*/ :
1, target_value, log2_p from :
1 e :
1creating scores by tid :
1select /*+ PARALLEL(prob)*/ :
1, target_value, :
1CASE WHEN likelihood > 1.0E-129*sum_likelihood THEN likelihood/sum_likelihood :
1ELSE 0 END predicted_probability from :
1(select /*+ PARALLEL(lsl) */ :
1power(2.0, log_shifted_likelihood) likelihood, :
1SUM(power(2.0,log_shifted_likelihood)) OVER (partition by :
1) sum_likelihood from :
1(select /*+ PARALLEL(sl) */ :
1target_value, :
1log2_p - MAX(log2_p) OVER (partition by :
1) log_shifted_likelihood from :
1(select /*+PARALLEL(d)*/ d.:
1, d.target_value, log2_p from :
1 ) group by :
1, target_value ) sl :
1) lsl:
1) prob:
1done creating scores by tid table:: :
1LEAVING sc_create_scores:
1V_SQL_TREES:
1V_SQL_PRIORS_CLAUSE:
1In sc_create_scores - Depth 1:
1(SELECT * from :
1CASE WHEN likelihood > 1.0E-129*sum_likelihood THEN likelihood/sum_likelihood ELSE 0 END predicted_probability from :
1(select /*+PARALLEL(d)*/ :
1, d.target_value, sum(log2_p) log2_p from :
1 (select :
1 (select /*+PARALLEL(d1)*/ distinct :
1 d1) :
1UNION ALL :
1(select /*+PARALLEL(a) USE_HASH(t a) PQ_DISTRIBUTE(a NONE, BROADCAST)*/ :
1, target_value, sum(log2_p) log2_p :
1 a, :
1where t.predictor = a.:
1t.predictor_value = a.:
1, target_value) ) d group by :
1, target_value) sl:
1SC_FIND_TOPK_DETAILED_RULES:
1P_I
Line 8: NT_APPLY_TABLE:
1P_RESULT_TABLE:
1P_RESULT_TABLE_CREATED:
1P_NUM_DETAILED_RULES:
1V_SQL_MA:
1V_RANK_CLAUSE:
1V_TREE_LIST:
1V_MAX_DEPTH_LIST:
1In sc_find_topK_detailed_rules-tree:
1SELECT max(depth_of_tree) from :
1E_INVALID_INPUT_NUM:
1creating p_result_table:: :
1PARTITION BY HASH ( seqid ) :
1 select /*+PARALLEL(r1) PARALLEL(pr)*/:
1r1.seqid, TARGET_VALUE_STRING pred, PREDICTED_PROBABILITY cost, block_num rule FROM :
1 (select /*+PARALLEL(r)*/ * from :
1 (select /*+PARALLEL(nr)*/ seqid, block_num, target_value,:
1    ROW_NUMBER() OVER(PARTITION BY seqid, target_value ORDER BY block_num DESC) rule_rank from :
1 (select /*+ PARALLEL(ma)*/ ma.:
1 seqid, block_num, ma.target_value from:
1 (select /*+ PARALLEL(a) USE_HASH(t a) */ :
1t.target_value, block_num from :
1WHERE t.predictor = a.:
1 AND t.predictor_value = a.:
1 ) ma, :
1 (select /*+ PARALLEL(m) USE_HASH(fr m)*/ :
1, fr.target_value, fr.block_num :
1 m WHERE :
1(:
1fr.pred_:
1m.pred_:
1 IS NULL :
1 na :
1WHERE na.:
1 = ma.:
1na.TARGET_VALUE = ma.target_value :
1select /*+ PARALLEL(d)*/ :
1 seqid, 1 BLOCK_NUM, TARGET_VALUE from :
1 d ) nr ) r :
1where rule_rank = 1) r1, :
1(select distinct target_value, target_value_string from :
1where pr.sequence_id = r1.seqid AND pr.target_value = r1.target_value :
1AND t.target_value = pr.target_value:
1DONE creating p_result_table:: :
1inserting into p_result_table:: :
1DONE inserting into p_result_table:: :
1SC_CREATE_ACTUALS_TABLE:
1creating p_sc_actuals_table_name :
1select /*+PARALLEL(t)*/ :
1 actual_target_value :
1done creating p_sc_actuals_table_name table:: :
1SC_CREATE_DATA_TRANS_COST:
1creating p_sc_data_trans_table_name:
1select -sum( log(2,p)) data_trans_cost, count(*) cnt :
1(select CASE WHEN p > (1.0 - (1.0 / (cnt + :
1 - 1.0))) :
1THEN (1.0 - (1.0 / (cnt + :
1WHEN p > (1.0 / (cnt + :
1 - 1.0)) :
1THEN p :
1ELSE (1.0 / (cnt + :
1 - 1.0)) END p from :
1(select count(*) cnt from :
1, target_value, predicted_probability p :
1, actual_target_value from :
1 = a.:
1p.target_value = a.actual_target_value ):
1done creating p_sc_data_trans_table_name table:: :
1SC_CREATE_CNTBITS_COST:
1P_CNTBITS:
1P_STANDALONE_TEST:
1V_TREE_LIMIT:
1pt.tree_num = :
1NOT:
1pt.tree_num <= :
1counting admissible models associated with feature :
1select max(cnt_bits) from :
1(select tree_num, sum(odm_ABN_model.compute_log_combinations(cnt + :
1 - 1)) cnt_bits from :
1(select pt.tree_num, block_num, sum(cnt) cnt from :
1 pt, :
1(select tree_num, max(depth_of_tree) max_depth from :
1 WHERE ACTIVE = 1 :
1 group by tree_num) md :
1where :
1 AND pt.tree_num = md.tree_num AND cnt > 1 AND pt.depth_of_tree = max_depth :
1group by pt.tree_num, block_num) :
1group by tree_num):
1DONE counting admissible models associated with feature :
1SC_CREATE_MODEL_COST:
1P_NUM_PREDICTORS:
1P_BASELINE_COST:
1P_ABN_MODEL_TYPE:
1V_TREEBITS_STMT:
1 0 :
1V_PREDBITS1_STMT:
1V_PREDBITS2_STMT:
1creating p_sc_model_cost_table_name :
1 odm_ABN_model.compute_log_combinations(2.0 * num_preds - 1.0, num_preds) :
1pred_bits :
1(select count(DISTINCT PREDICTOR) num_preds, :
1 pred_bits from :
1(select DISTINCT TREE_NUM, PREDICTOR :
1 where tree_num :
1 <= :
1 AND ACTIVE = 1 :
1) ) vp, :
1select data_trans_cost + :
1 + :
1 model_description, :
1data_trans_cost, :
1pred_bits, :
1 tree_bits, :
1 cnt_bits, :
1 baseline_description :
1DONE creating p_sc_model_cost_table_name table:: :
1select MODEL_DESCRIPTION, DATA_TRANS_COST, TREE_BITS, PRED_BITS, CNT_BITS, BASELINE_DESCRIPTION from :
1model_description:: :
1 data_trans_cost:: :
1 tree_bits:: :
1 pred_bits:: :
1 baseline_description:: :
1MDL_TEST:
1P_MIN_DESC_MODEL:
1P_CURRENT_DESC:
1P_SINGLE_FEATURE:
1V_SQL_FEATURE:
1V_CNTBITS:
1getting NAIVE BAYES cost :
1getting cost with this :
1 feature at depth :
1select MODEL_DESCRIPTION from :
1got NAIVE BAYES cost:: :
1 compared to current min:: :
1got cost with this :
1FIND_NEXT_CANDIDATE:
1P_LAST_GOOD_TREE:
1P_NEXT_CANDIDATE:
1P_ACCEPTED_TREE_CNT:
1P_CURRENT_REJECTIONS:
Line 9: 1P_MAXIMUM_REJECTIONS:
1P_MAX_PARTITIONS:
1select min(tree_num) from :
1 where tree_num > :
1 AND active = 1:
1got next feature tree num:: :
1beginning init_block_nums_for_new_seed:: :
1DONE initialization of the :
1th tree:
1EXTEND_THIS_TREE:
1V_DUMMY:
1V_NUM_AVAILABLE_PREDS:
1starting next iteration. p_current_tree:: :
1 p_current_depth:: :
1done with next iteration. p_current_tree:: :
1ESTIMATE_FEATURE_DEPTH:
1P_ESTIMATED_FEATURE_DEPTH:
1V_NUM_PREDICTORS:
1V_TARGET_ENTROPY:
1estimating feature depth:
1select num_predictors, target_entropy from :
1(select count(distinct :
1) - 1 num_predictors from :
1), :
1(select cnt * entropy target_entropy from :
1ROUND:
15.0:
1*:
1DONE estimating feature depth:: :
1 minutes:
1ESTIMATE_FEATURE_SCORING_TIME:
1P_CUM_BLD_TIME:
1P_ESTIMATED_FEATURE_SCR_TIME:
1V_TIME_TO_BLD_ADDED_LEVELS:
1V_TIME_TO_SCR_THIS_LEVEL:
1V_MEAN_BUILD_TIME:
11.0:
1estimating feature build time:
10.1:
1DONE estimating feature build time:: :
1ESTIMATE_TIME_TO_NEXT_LEVEL:
1P_EST_TIME_TO_NEXT_LEVEL:
1estimating time to build next level:
1DONE estimating time to build next level:: :
1INSERT_INTO_TIMING_TABLE:
1P_DEPTH_OF_TREE:
1P_SEGMENT_BUILD_TIME:
1P_FEATURE_SCORING_TIME:
1P_IS_TERMINATED:
1P_IS_ACCEPTED:
1P_EST_FEATURE_SCORING_TIME:
1V_SQL_LIST:
1V_SQL_VALUES:
1 (tree_num, depth_of_tree, :
1 segment_build_time, :
1 feature_scoring_time, :
1 is_terminated, is_accepted:
1, est_feature_scoring_time:
1inserting timings into :
1DONE inserting timings for building Naive Bayes model into :
1GET_RULES_PRIOR_COST_TABLES:
1SCHEMA_OBJECT_NAME_TYPE:
1P_RULES_TABLE_NAME:
1P_PRIOR_TABLE_NAME:
1P_COST_MATRIX_TABLE_NAME:
1P_BIN_NUMERICAL_TABLE_NAME:
1V_CURRENT_EXCEPTION:
1SELECT table_1, table_5, table_6, bin_categorical_table, BIN_NUMERICAL_TABLE FROM odm_mining_model :
1WHERE name = :
1p_rules_table_name:: :
1 p_prior_table_name:: :
1 p_cost_matrix_table_name:: :
1 p_bin_categorical_table_name:: :
1 p_bin_numerical_table_name:: :
1E_TABLE_NOT_FOUND_NUM:
1E_TABLE_IS_EMPTY_NUM:
1DONE validation:
1DONE get_rules_prior_cost_tables:
1in error code:: :
1INTERNAL_APPLY:
1P_APPLY_SUBSET2D_TABLE_NAME:
1P_APPLY_SUBSET2D_TABLE_CREATED:
1V_INVALID_COLUMN_NAME:
1V_FEATURE_LOG_P_TABLE_NAME:
1V_FEATURE_LOG_P_TABLE_CREATED:
1V_INTERNAL_PRIORS_TABLE_NAME:
1V_INTERNAL_PRIORS_CREATED:
19999999999:
1V_MAX_PARTITIONS:
1999999999:
1starting validation:
1p_scoring_table_name is :
1UPPER:
1p_scoring_table_name table exists:
1p_scoring_table_name table is not empty:
1p_tid_column_name is :
1E_COLUMN_NOT_FOUND_NUM:
1COLUMN_EXIST:
1NVL:
1NULL:
1p_tid_column_name found:
1p_item_column_name is :
1p_item_column_name found:
1p_value_column_name is :
1 and it is OK:
1E_MODEL_IS_EMPTY_NUM:
1model table :
1 is empty:
1p_trees_table_name table is not empty:
1done validation:
1getting max depth_of_tree:
1select max(depth_of_tree) from :
1DONE getting max depth_of_tree:
1NOT a Naive Bayes model:
1enter sc_create_feature_log_p from v_max_depth > 1:
1scoring rows:
1done scoring rows:
1IS a Naive Bayes model:
1done with cleanup:
1APPLY:
1P_DAT_SEQID_COLUMN_NAME:
1P_DAT_ATTR_COLUMN_NAME:
1P_DAT_VALUE_COLUMN_NAME:
1P_SUPPL_TABLE_NAME:
1P_SUP_SEQID_COLUMN_NAME:
1P_PREDICTION_TYPE:
1P_RES_SEQID_COLUMN_NAME:
1V_RULES2D_TABLE_NAME:
1V_RULES2D_CREATED:
1V_APPLY_SUBSET2D_TABLE_NAME:
1V_APPLY_SUBSET2D_TABLE_CREATED:
1V_PROBABILITY_TABLE_NAME:
1V_PROBABILITY_TABLE_CREATED:
1V_RES_PROB_COLUMN_NAME:
1predicted_probability:
1START supervised model compute predictions:
1ODM_SUPERVISED_MODEL:
1COMPUTE_PREDICTION:
1target_value:
1DONE supervised model compute predictions:
1APPLY_FOR_TEST:
1P_APPLY_TABLE_NAME:
1P_APP_ACTUAL_COLUMN_NAME:
1P_APP_PREDICTED_COLUMN_NAME:
1V_TARGET_VIEW_NAME:
1V_TARGET_VIEW_CREATED:
1V_APPLY_TABLE_CREATED:
1V_ABN_MODEL_TYPE:
1In apply_for_test -creating v_target_view_name:
1UNIQUE_VIEW_NAME:
1CREATE VIEW :
1 AS :
1 seq_id, :
1WHERE :
1 = ':
1In apply_for_test -DONE creating v_target_view_name:
1Invoking private apply:
1seq_id:
1C_PT_TOP:
1DONE Invoking priv
Line 10: ate apply:
1DROP_VIEW:
1APPLY_FOR_LIFT:
1P_APP_PROB_COLUMN_NAME:
1V_POS_TARGET_TABLE_NAME:
1V_POS_TARGET_TABLE_CREATED:
1V_USER:
1p_positive_target_value:: :
1creating v_target_view_name:
1DONE creating v_target_view_name:: :
1creating v_pos_target_table_name:
1 target_value :
1FROM dual:
1DONE creating v_pos_target_table_name:: :
1invoking private apply (for lift):
1C_PT_LIST:
1DONE invoking private apply (for lift):
1UPDATE_NODE_P:
1V_FOO:
1V_FOO_CREATED:
1creating temp table to hold node_p values:
1 SELECT block_num, target_value, :
1          CASE WHEN likelihood > 1.0E-129*sum_likelihood THEN likelihood/sum_likelihood ELSE 0 END node_p from :
1          (SELECT block_num, target_value, power(2.0, log_shifted_likelihood) likelihood, :
1              TRUNC(SUM(power(2.0,log_shifted_likelihood)) OVER (partition by block_num), 20) sum_likelihood from :
1 (SELECT block_num, target_value, :
1                  log2_p - MAX(log2_p) OVER (partition by block_num) log_shifted_likelihood from :
1              (SELECT tt.block_num, tt.target_value, tt.log2_p+p.log2_p log2_p from :
1                  (SELECT block_num, target_value, odm_abn_model.parse_and_add(lp) log2_p FROM :
1 (select block_num, target_value, sys_connect_by_path(log2_p, :
1) lp from :
1                      (select block_num, depth_of_tree, target_value, log2_p, block_num || target_value rid, :
1 parent_block_num || target_value prid from :
1                        START WITH depth_of_tree = 1 connect by prior rid = prid)) tt, :
1 (SELECT target_value, log2_p from :
1 WHERE p.target_value = tt.target_value) lp )):
1DONE creating temp table to hold node_p values:
1updating node_p values in trees table to reflect Bayes conditional probabilities:
1 t SET node_p = (select node_p from :
1 f :
1 where t.block_num=f.block_num and t.target_value=f.target_value):
1DONE updating node_p values in trees table to reflect Bayes conditional probabilities:
1BUILD:
1P_BIN_NUM_TABLE_NAME:
1P_MAX_BUILD_TIME:
1P_NUM_NB_PREDICTORS:
110:
1P_ABN_PRED_BIT_FRACTION:
1V_TIME_START:
1V_TIME_END:
1V_FEATURE_START:
1V_ELAPSED_MINUTES:
1V_PREVIOUS_MINUTES:
1V_FEATURE_MINUTES:
1V_MAX_BUILD_TIME:
1P_TIMING_TABLE_CREATED:
1V_EST_FEATURE_SCORING_TIME:
1V_EST_TIME_TO_NEXT_LEVEL:
1V_ESTIMATED_FEATURE_DEPTH:
1V_PREDICTOR_NAME:
1V_BIN_COLUMN_NAME:
1PREDICTOR_VALUE:
1V_UNBIN_COLUMN_NAME:
1PREDICTOR_VALUE_STRING:
1V_TAB:
1V_SEQ:
1V_STARTING_BLOCK:
1V_TREES_TABLE_CREATED:
1V_SQL_CLAUSE:
1V_TERMINATE:
1V_CURRENT_DESC:
1V_MIN_DESC_MODEL:
1V_BASELINE_DESC:
1V_MAXIMUM_TREE_DEPTH:
1V_MAX_TREE_DEPTH:
1V_ACCEPTED_TREE_CNT:
1V_MAXIMUM_PREDICTORS:
1V_MAXIMUM_REJECTIONS:
1V_CURRENT_REJECTIONS:
1V_CURRENT_NUM_TREES:
1V_NUM_TARGET_VALUES:
1V_LAST_GOOD_TREE:
1V_DEBUG_ENABLED:
1V_NUM_CANDIDATES:
1V_BASELINE_COST:
1V_TRAINING_PARTITION:
1V_ABN_PRED_BIT_FRACTION:
1V_COMPLEXITY_PRED_LIMIT:
1V_ABN_COMPLEXITY_BIAS:
1V_MAX_ATTRIBUTES:
1Entering :
1p_max_build_time:: :
1p_maximum_tree_depth:: :
1p_maximum_predictors:: :
1p_maximum_rejections:: :
1p_num_NB_predictors:: :
1p_abn_model_type:: :
1p_abn_pred_bit_fraction:: :
1p_abn_complexity_bias:: :
1ERROR:: NULL p_training_table_name:
1ERROR:: NULL p_priors_table_name:
1ERROR:: NULL p_timing_table_name:
1ERROR:: non-existing training table, name = :
1ERROR:: non-existing num bin boundary table, name = :
1ERROR:: non-existing num cat boundary table, name = :
1ERROR:: NULL p_tid_column_name:
1ERROR:: non-existing id column, name = :
1ERROR:: NULL p_item_column_name:
1ERROR:: non-existing item column, name = :
1ERROR:: non-existing value column, name = :
1ERROR:: invalid value for p_maximum_tree_depth = :
1ERROR:: invalid value for p_maximum_predictors = :
1ERROR:: NULL p_trees_table_name:
1E_TABLE_ALREADY_EXIST_NUM:
1ERROR:: already existing trees table, name = :
1creating timing table:: :
1 (tree_num NUMBER, depth_of_tree NUMBER, segment_build_time NUMBER, :
1feature_scoring_time NUMBER, is_