DBA Data[Home] [Help]

PACKAGE: SYS.DBMS_HADOOP_INTERNAL

Source


1 PACKAGE DBMS_HADOOP_INTERNAL
2 AUTHID DEFINER
3 --ACCESSIBLE BY (DBMS_HADOOP)
4 AS
5 --
6 -- NAME
7 --   UNIX_TS_TO_DATE
8 --
9 -- DESCRIPTION
10 --   Ancillary function to convert Julian date/time value to calendar date
11 --
12 -- PARAMETERS
13 --   julian_time   - Date/time in Julian value
14 --
15 -- RETURNS
16 --   date          - Calendar date
17 --
18 -- EXCEPTIONS
19 --   NONE
20 --
21 FUNCTION UNIX_TS_TO_DATE(julian_time IN NUMBER)
22   RETURN DATE;
23 
24 --
25 -- NAME: DEBUG_USER_PRIVILEGED
26 --
27 -- DESCRIPTION:
28 -- Check whether a given user has WRITE privileges on the ORACLE_BIGDATA_DEBUG
29 -- directory
30 --
31 -- PARAMETERS: CURRENT_USER
32 --
33 -- RETURNS:
34 -- Returns 1 if the user has WRITE privileges on ORACLE_BIGDATA_DEBUG directory
35 -- 0 otherwise.
36 --
37 FUNCTION DEBUG_USER_PRIVILEGED(CURRENT_USER VARCHAR2)
38 RETURN NUMBER;
39 
40 --
-- NAME: GETNUMBEROFITEMS
--
-- DESCRIPTION:
-- Counts the number of items contained in INSTR. NOTE(review): the package
-- body is not visible from this spec; presumably the items are the
-- BOUNDARYKEY-delimited segments of the CLOB -- confirm against the body.
--
-- PARAMETERS:
-- INSTR       - Input CLOB to be scanned
-- BOUNDARYKEY - Single-character delimiter marking item boundaries
--
-- RETURNS:
-- The number of items found
--
41 FUNCTION GETNUMBEROFITEMS(INSTR IN CLOB,
42                           BOUNDARYKEY IN CHAR
43 )
44 RETURN NUMBER;
45 
46 --
47 -- NAME: USER_PRIVILEGED
48 --
49 -- DESCRIPTION:
50 -- Check whether a given user has READ privileges on the specified hadoop
51 -- cluster. The user should have READ permission on the oracle directory
52 -- object, ORACLE_BIGDATA_CONFIG.
53 --
54 -- PARAMETERS:
55 -- CLUSTER_ID: The hadoop cluster id which the user is trying to access
56 -- CURRENT_USER: The name of the current user
57 --
58 -- RETURNS:
59 -- Returns 1 if the user has READ privileges on the specified hadoop cluster,
60 -- 0 otherwise.
61 --
62 FUNCTION USER_PRIVILEGED(CLUSTER_ID IN VARCHAR2,
63                          CURRENT_USER IN VARCHAR2)
64 RETURN NUMBER;
65 
-- Returns the path of the debug directory available to CURRENT_USER.
-- NOTE(review): implementation not visible from this spec; presumably this
-- resolves the ORACLE_BIGDATA_DEBUG directory object (see
-- DEBUG_USER_PRIVILEGED above) -- confirm against the package body.
66 FUNCTION GET_DEBUG_DIR(CURRENT_USER VARCHAR2)
67   RETURN VARCHAR2;
68 
-- Returns the path of the configuration directory. NOTE(review): presumably
-- the ORACLE_BIGDATA_CONFIG directory object's path, matching the configDir
-- parameter of getHiveTable below -- confirm against the package body.
69 FUNCTION GET_CONFIG_DIR
70   RETURN VARCHAR2;
71 
72 -- getHiveTable()
73 -- DESCRIPTION
74 --   getHiveTable() is a pipelined table function that returns the rows back
75 --   from C external procedures via ODCI to PL/SQL. The rows sent from C
76 --   external procedures actually originate from various Hive metastores and
77 --   fetched via JNI calls made from hotspot JVM
78 --
79 -- PARAMETERS
80 --   configDir   - The absolute path of the ORACLE_BIGDATA_CONFIG directory
81 --   *** IMPORTANT *** The first 8 characters of configDir should include the
82 --                 length of configDir.
83 --   clusterName - The name of the Hadoop cluster where data is coming from
84 --   dbName      - The Hive database name where the data is coming from
85 --   tblName     - The Hive table where the data is coming from
86 --   createPartitions - Indicates whether to add Hive partition key columns
87 --                 to external table's column list
88 --   callType    - one of (0, 1, 2, 3).
89 --               - 0: Will fetch only database related Hive metadata
90 --               - 1: Will fetch only table related Hive metadata
91 --               - 2: Will fetch only column related Hive metadata
92 --               - 3: Will generate a create external table DDL for a
93 --                    specified Hive table
94 --
95 -- RETURNS
96 --  hiveTypeSet as pipelined rows
97 --
98 FUNCTION getHiveTable
99 (
100   configDir        IN VARCHAR2,
101   clusterName      IN VARCHAR2,
102   dbName           IN VARCHAR2,
103   tblName          IN VARCHAR2,
104   createPartitions IN VARCHAR2,
105   callType         IN NUMBER
106 )
107 RETURN hiveTypeSet PIPELINED USING HiveMetadata;
108 
-- GET_DDL()
-- Returns DDL for the given Hive table as a CLOB. NOTE(review): body not
-- visible from this spec; presumably generates a CREATE EXTERNAL TABLE
-- statement (compare callType = 3 of getHiveTable above) -- confirm against
-- the package body.
--
-- PARAMETERS:
-- secureConfigDir     - Path of the configuration directory
-- secureDebugDir      - Path of the debug directory
-- secureClusterId     - Hadoop cluster id the table belongs to
-- secureDbName        - Hive database name
-- secureHiveTableName - Hive table name
-- createPartitions    - Whether to add Hive partition key columns
109 FUNCTION GET_DDL(secureConfigDir VARCHAR2,
110                  secureDebugDir VARCHAR2,
111                  secureClusterId VARCHAR2,
112                  secureDbName VARCHAR2,
113                  secureHiveTableName VARCHAR2,
114                  createPartitions VARCHAR2
115                 )
116 RETURN CLOB;
117 
118 
-- Resolves NAME and returns its owner and object name through the IN OUT
-- parameters MYOWNER and MYNAME. NOTE(review): the role of DOWNER (default
-- owner used when NAME is unqualified?) is not visible from this spec --
-- confirm against the package body.
119 PROCEDURE GET_NAME(NAME IN VARCHAR2, MYOWNER IN OUT VARCHAR2,
120                    MYNAME IN OUT VARCHAR2, DOWNER IN VARCHAR2);
121 
122 END DBMS_HADOOP_INTERNAL;