DBA Data[Home] [Help]

PACKAGE: SYS.DBMS_DATAPUMP

Source


1 PACKAGE dbms_datapump AUTHID CURRENT_USER AS
2 ---------------------------------------------------------------------
3 -- Overview
4 -- This pkg implements the DBMS_DATAPUMP API, a mechanism to allow users
5 -- to move all or parts of a database between databases, superseding
6 -- functionality previously associated with the Export and Import utilities
7 -- (which will now rely on the dbms_datapump interface). Dbms_datapump will
8 -- also support the loading and unloading of data in a proprietary format.
9 ---------------------------------------------------------------------
10 -- SECURITY
11 -- This package is owned by SYS with execute access granted to PUBLIC.
12 -- It runs with invokers rights for the most part, i.e., with the security
13 -- profile of the caller. Two roles allow users to take full advantage of
14 -- the API:
15 -- EXP_FULL_DATABASE (only affects Export and Estimate operations) allows:
16 --      o Operations outside the scope of a user's schema
17 --      o Operations with increased parallelism
18 --      o Writes to log and dump files without setting up DIRECTORY objects
19 --        to reference their owning directories
20 --      o Use of sequential media within a dump file set
21 --      o Restarts of previously stopped jobs initiated by another user
22 -- IMP_FULL_DATABASE (only affects Import, Network, and Sql_file
23 --        operations) allows:
24 --      o Operations outside the scope of their schema
25 --      o Operations with increased parallelism
26 --      o Reads to dump file sets and writes to log files without the use of
27 --        DIRECTORY objects
28 --      o Use of sequential media within the dump file set
29 --      o Restarts of previously stopped jobs initiated by another user
30 -- Note that some internal operations will run in a more privileged context.
31 --------------------
32 --  PUBLIC CONSTANTS
33 --
34 KU$_STATUS_WIP                 CONSTANT BINARY_INTEGER := 1;
35 KU$_STATUS_JOB_DESC            CONSTANT BINARY_INTEGER := 2;
36 KU$_STATUS_JOB_STATUS          CONSTANT BINARY_INTEGER := 4;
37 KU$_STATUS_JOB_ERROR           CONSTANT BINARY_INTEGER := 8;
38 
39 KU$_FILE_TYPE_DUMP_FILE        CONSTANT BINARY_INTEGER := 1;
40 KU$_FILE_TYPE_BAD_FILE         CONSTANT BINARY_INTEGER := 2;
41 KU$_FILE_TYPE_LOG_FILE         CONSTANT BINARY_INTEGER := 3;
42 KU$_FILE_TYPE_SQL_FILE         CONSTANT BINARY_INTEGER := 4;
43 
44 KU$_DUMPFILE_TYPE_DISK         CONSTANT BINARY_INTEGER := 0;
45 KU$_DUMPFILE_TYPE_PIPE         CONSTANT BINARY_INTEGER := 1;
46 KU$_DUMPFILE_TYPE_TAPE         CONSTANT BINARY_INTEGER := 2;
47 KU$_DUMPFILE_TYPE_TEMPLATE     CONSTANT BINARY_INTEGER := 3;
48 
49 KU$_STATUS_VERSION_1           CONSTANT NUMBER := 1;
50 KU$_STATUS_VERSION_2           CONSTANT NUMBER := 2;
51 KU$_STATUS_VERSION_3           CONSTANT NUMBER := 3;
52 KU$_STATUS_VERSION_4           CONSTANT NUMBER := 4;
53 KU$_STATUS_VERSION             CONSTANT NUMBER := KU$_STATUS_VERSION_4;
54 
55 KU$_JOB_VIEW_ALL               CONSTANT NUMBER := 0;
56 KU$_JOB_VIEW_TTS_TABLESPACES   CONSTANT NUMBER := 1;
57 KU$_JOB_VIEW_ENCCOL_TABLES     CONSTANT NUMBER := 2;
58 
59 KU$_JOB_COMPLETE               CONSTANT NUMBER := 1;
60 KU$_JOB_COMPLETE_ERRORS        CONSTANT NUMBER := 2;
61 KU$_JOB_STOPPED                CONSTANT NUMBER := 3;
62 KU$_JOB_ABORTED                CONSTANT NUMBER := 4;
63 
64 --
65 -- Items codes for entry in a dump file info table (of type ku$_dumpfile_info).
66 --
67 -- NOTE: For each constant defined here there is a corresponding constant
68 --       defined in kupf.h named KUPF_DFINFO_xxx_IDX where xxx is VERSION,
69 --       MASTER_PRESENT, GUID, etc. Any changes to the constants defined
70 --       *MUST* be reflected to those defined in kupf.h
71 --
72 KU$_DFHDR_FILE_VERSION         CONSTANT NUMBER := 1;
73 KU$_DFHDR_MASTER_PRESENT       CONSTANT NUMBER := 2;
74 KU$_DFHDR_GUID                 CONSTANT NUMBER := 3;
75 KU$_DFHDR_FILE_NUMBER          CONSTANT NUMBER := 4;
76 KU$_DFHDR_CHARSET_ID           CONSTANT NUMBER := 5;
77 KU$_DFHDR_CREATION_DATE        CONSTANT NUMBER := 6;
78 KU$_DFHDR_FLAGS                CONSTANT NUMBER := 7;
79 KU$_DFHDR_JOB_NAME             CONSTANT NUMBER := 8;
80 KU$_DFHDR_PLATFORM             CONSTANT NUMBER := 9;
81 KU$_DFHDR_INSTANCE             CONSTANT NUMBER := 10;
82 KU$_DFHDR_LANGUAGE             CONSTANT NUMBER := 11;
83 KU$_DFHDR_BLOCKSIZE            CONSTANT NUMBER := 12;
84 KU$_DFHDR_DIRPATH              CONSTANT NUMBER := 13;
85 KU$_DFHDR_METADATA_COMPRESSED  CONSTANT NUMBER := 14;
86 KU$_DFHDR_DB_VERSION           CONSTANT NUMBER := 15;
87 KU$_DFHDR_MASTER_PIECE_COUNT   CONSTANT NUMBER := 16;
88 KU$_DFHDR_MASTER_PIECE_NUMBER  CONSTANT NUMBER := 17;
89 KU$_DFHDR_DATA_COMPRESSED      CONSTANT NUMBER := 18;
90 KU$_DFHDR_METADATA_ENCRYPTED   CONSTANT NUMBER := 19;
91 KU$_DFHDR_DATA_ENCRYPTED       CONSTANT NUMBER := 20;
92 KU$_DFHDR_COLUMNS_ENCRYPTED    CONSTANT NUMBER := 21;
93 --
94 -- Item codes KU$_DFHDR_ENCPWD_MODE and KU$_DFHDR_ENCPWD_MODE_xxx
95 -- are obsolescent and will be removed in a future version. Instead,
96 -- KU$_DFHDR_ENCRYPTION_MODE and KU$_DFHDR_ENCMODE_xxx should be used.
97 --
98 KU$_DFHDR_ENCPWD_MODE          CONSTANT NUMBER := 22;
99   KU$_DFHDR_ENCPWD_MODE_UNKNOWN   CONSTANT NUMBER := 1;
100   KU$_DFHDR_ENCPWD_MODE_NONE      CONSTANT NUMBER := 2;
101   KU$_DFHDR_ENCPWD_MODE_PASSWORD  CONSTANT NUMBER := 3;
102   KU$_DFHDR_ENCPWD_MODE_DUAL      CONSTANT NUMBER := 4;
103   KU$_DFHDR_ENCPWD_MODE_TRANS     CONSTANT NUMBER := 5;
104 
105 --
106 -- KU$_DFHDR_ENCMODE_xxx are values that can be returned
107 -- for item code KU$_DFHDR_ENCRYPTION_MODE.
108 --
109 KU$_DFHDR_ENCRYPTION_MODE      CONSTANT NUMBER := 22;
110   KU$_DFHDR_ENCMODE_UNKNOWN       CONSTANT NUMBER := 1;
111   KU$_DFHDR_ENCMODE_NONE          CONSTANT NUMBER := 2;
112   KU$_DFHDR_ENCMODE_PASSWORD      CONSTANT NUMBER := 3;
113   KU$_DFHDR_ENCMODE_DUAL          CONSTANT NUMBER := 4;
114   KU$_DFHDR_ENCMODE_TRANS         CONSTANT NUMBER := 5;
115 
116 --
117 -- KU$_DFHDR_CMPALG_xxx are values that can be returned
118 -- for item code KU$_DFHDR_COMPRESSION_ALG.
119 --
120 KU$_DFHDR_COMPRESSION_ALG      CONSTANT NUMBER := 23;
121   KU$_DFHDR_CMPALG_UNKNOWN        CONSTANT NUMBER := 1;
122   KU$_DFHDR_CMPALG_NONE           CONSTANT NUMBER := 2;
123   KU$_DFHDR_CMPALG_BASIC          CONSTANT NUMBER := 3;
124   KU$_DFHDR_CMPALG_LOW            CONSTANT NUMBER := 4;
125   KU$_DFHDR_CMPALG_MEDIUM         CONSTANT NUMBER := 5;
126   KU$_DFHDR_CMPALG_HIGH           CONSTANT NUMBER := 6;
127 
128 KU$_DFHDR_MAX_ITEM_CODE        CONSTANT NUMBER := 23;
129 
130 KU$_COMPRESS_NONE              CONSTANT NUMBER := 1;
131 KU$_COMPRESS_METADATA          CONSTANT NUMBER := 2;
132 
133 -- Bitmask defs used in DATA_OPTIONS parameter. Values above 1048576 are
134 -- reserved for internal use.
135 
136 KU$_DATAOPT_SKIP_CONST_ERR       CONSTANT NUMBER := 1;
137 KU$_DATAOPT_XMLTYPE_CLOB         CONSTANT NUMBER := 2;
138 KU$_DATAOPT_NOTYPE_EVOL          CONSTANT NUMBER := 4;
139 KU$_DATAOPT_DISABL_APPEND_HINT   CONSTANT NUMBER := 8;
140 KU$_DATAOPT_REJECT_ROWS_REPCHR   CONSTANT NUMBER := 16;
141 
142 -- Bitmask defs for the flags field of dictionary table impcalloutreg$
143 -- See dtools.bsq for detailed descriptions
144 
145 KU$_ICRFLAGS_IS_EXPR             CONSTANT NUMBER := 1;
146 KU$_ICRFLAGS_EARLY_IMPORT        CONSTANT NUMBER := 2;
147 KU$_ICRFLAGS_GET_DEPENDENTS      CONSTANT NUMBER := 4;
148 KU$_ICRFLAGS_EXCLUDE             CONSTANT NUMBER := 8;
149 KU$_ICRFLAGS_XDB_NO_TTS          CONSTANT NUMBER := 16;
150 
151 -- Values for 'prepost' in import callouts
152 
153 KU$_ICRPP_PREIMPORT              CONSTANT NUMBER := 0;
154 KU$_ICRPP_EARLY                  CONSTANT NUMBER := 2;
155 KU$_ICRPP_FINAL                  CONSTANT NUMBER := 1;
156 
157 -- Bitmask defs for flags field of data_remap
158 
159 KU$_DATA_REMAP_WITH_ROWID        CONSTANT NUMBER := 1;
160 
161 -------------
162 -- EXCEPTIONS
163 --      The following exceptions can be generated by the DBMS_DATAPUMP API:
164 -- INVALID_ARGVAL, PRIVILEGE_ERROR, INVALID_OPERATION,
165 -- OBJECT_NOT_FOUND, INVALID_HANDLE, INVALID_STATE, INCONSISTENT_ARGS,
166 -- JOB_EXISTS, NO_SUCH_JOB, INVALID_VALUE, SUCCESS_WITH_INFO
167 --
168   invalid_argval EXCEPTION; -- OK
169     PRAGMA EXCEPTION_INIT(invalid_argval, -39001);
170     invalid_argval_num NUMBER := -39001;
171 -- "Invalid argument value"
172 -- *Cause:  The user specified API parameters were of the wrong type or
173 --          value range.  Subsequent messages supplied by
174 --          DBMS_DATAPUMP.GET_STATUS will further describe the error.
175 -- *Action: Correct the bad argument and retry the API.
176 
177   invalid_operation EXCEPTION;
178     PRAGMA EXCEPTION_INIT(invalid_operation, -39002);
179     invalid_operation_num NUMBER := -39002;
180 -- "invalid operation"
181 -- *Cause:  The current API cannot be executed because of inconsistencies
182 --          between the API and the current definition of the job.
183 --          Subsequent messages supplied by DBMS_DATAPUMP.GET_STATUS
184 --          will further describe the error.
185 -- *Action: Modify the API call to be consistent with the current job or
186 --          redefine the job in a manner that will support the specified API.
187 
188   inconsistent_args EXCEPTION; -- OK
189     PRAGMA EXCEPTION_INIT(inconsistent_args, -39005);
190     inconsistent_args_num NUMBER := -39005;
191 -- "inconsistent arguments"
192 -- *Cause:  The current API cannot be executed because of inconsistencies
193 --          between arguments of the API call.
194 --          Subsequent messages supplied by DBMS_DATAPUMP.GET_STATUS
195 --          will further describe the error.
196 -- *Action: Modify the API call to be consistent with itself.
197 
198   privilege_error EXCEPTION;
199     PRAGMA EXCEPTION_INIT(privilege_error, -31631);
200     privilege_error_num NUMBER := -31631;
201 -- "privileges are required"
202 -- *Cause:  The necessary privileges are not available for operations such
203 --          as: restarting a job on behalf of another owner, using a device
204 --          as a member of the dump file set, or omitting a directory
205 --          object associated with any of the various output files.
206 -- *Action: Select a different job to restart, try a different operation, or
207 --          contact a database administrator to acquire the needed privileges.
208 
209   invalid_handle EXCEPTION;
210     PRAGMA EXCEPTION_INIT(invalid_handle, -31623);
211     invalid_handle_num NUMBER := -31623;
212 -- "The current session is not attached to the specified handle"
213 -- *Cause:  User specified an incorrect handle for a job.
214 -- *Action: Make sure handle was returned by DBMS_DATAPUMP.OPEN call.
215 
216   invalid_state EXCEPTION;
217     PRAGMA EXCEPTION_INIT(invalid_state, -39004);
218     invalid_state_num NUMBER := -39004;
219 -- "invalid state"
220 -- *Cause:  The state of the job precludes the execution of the API.
221 -- *Action: Rerun the job to specify the API when the job is an appropriate
222 --          state.
223 
224   job_exists EXCEPTION;
225     PRAGMA EXCEPTION_INIT(job_exists, -31634);
226     job_exists_num NUMBER := -31634;
227 -- "job already exists"
228 -- *Cause:  Job creation or restart failed because a job having the selected
229 --          name is currently executing.  This also generally indicates that
230 --          a Master Table with that job name exists in the user schema.
231 -- *Action: Select a different job name, or stop the currently executing job
232 --          and re-try the operation (may require a DROP on the Master Table).
233 
234   no_such_job EXCEPTION;
235     PRAGMA EXCEPTION_INIT(no_such_job, -31626);
236     no_such_job_num NUMBER := -31626;
237 -- "job does not exist"
238 -- *Cause:  An invalid reference to a job which is no longer executing,
239 --          is not executing on the instance where the operation was
240 --          attempted, or that does not have a valid Master Table.
241 --          Refer to the secondary error messages that follow this one for
242 --          clarification concerning the problems with the Master Table.
243 -- *Action: Start a new job, or attach to an existing job that has a
244 --          valid Master Table.
245 
246   internal_error EXCEPTION;
247     PRAGMA EXCEPTION_INIT(internal_error, -39006);
248     internal_error_num NUMBER := -39006;
249 -- "internal error"
250 -- *Cause:  An unexpected error occurred while processing a DataPump job.
251 --          Subsequent messages supplied by DBMS_DATAPUMP.GET_STATUS
252 --          will further describe the error.
253 -- *Action: Contact Oracle Support.
254 
255   success_with_info EXCEPTION;
256     PRAGMA EXCEPTION_INIT(success_with_info, -31627);
257     success_with_info_num NUMBER := -31627;
258 -- "API call succeeded but more information is available."
259 -- *Cause: User specified job parameters that yielded informational messages.
260 -- *Action: Call DBMS_DATAPUMP.GET_STATUS to retrieve additional information.
261 
262   no_dumpfile_info EXCEPTION;
263     PRAGMA EXCEPTION_INIT(no_dumpfile_info, -39211);
264     no_dumpfile_info_num NUMBER := -39211;
265 -- "Unable to retrieve dumpfile information as specified."
266 -- *Cause:  User specified an invalid or inaccessible file with the specified
267 --          filename and directory object.
268 -- *Action: Retry the operation with a valid directory object and filename.
269 
270   warning_dv_noencrypt EXCEPTION;
271     PRAGMA EXCEPTION_INIT(warning_dv_noencrypt, -39327);
272     warning_dv_noencrypt_num NUMBER := -39327;
273 -- "Database Vault data is being stored unencrypted in dump file set."
274 -- *Cause:  No encryption was specified for an export job that involved data
275 --          from a Database Vault Realm.
276 -- *Action: No specific user action is required.  This is only a warning that
277 --          secure data may be readable from within the dump file set.
278 
279 ---------------------------
280 -- PROCEDURES AND FUNCTIONS
281 ---------------------------
282 --
283 -- ADD_DEVICE: Adds a sequential device to the dump file set for Export,
284 --             Import, or Sql_file operations.
285 -- PARAMETERS:
286 --      handle     - Handle of the job (returned by OPEN)
287 --      devicename - Name of the device being added.
288 --      volumesize - Backing store capacity for the device.
289 --
290 -- RETURNS:
291 --      None
292 -- EXCEPTIONS:
293 --      INVALID_HANDLE    - The current session is not attached to this handle
294 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
295 --                          parameter.
296 --      PRIVILEGE_ERROR   - User didn't have EXP_FULL_DATABASE or
297 --                          IMP_FULL_DATABASE role
298 --      INVALID_OPERATION - The file was specified for a Network or Estimate
299 --                          operation, or the file was specified for an
300 --                          executing import or sql_file operation.
301 --      SUCCESS_WITH_INFO - API succeeded but further information available
302 --                          through the get_status API
303 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
304 --                          exists
305 
306   PROCEDURE add_device (
307                 handle          IN  NUMBER,
308                 devicename      IN  VARCHAR2,
309                 volumesize      IN  VARCHAR2 DEFAULT NULL
310         );
311 
312 -- ADD_FILE: Adds a file to the dump file set for Export, Import, or Sql_file
313 --           operations as well as log files, bad files, and sql files.
314 -- PARAMETERS:
315 --      handle    - Handle of the job (returned by OPEN)
316 --      filename  - Name of the file being added. This must be a simple
317 --                  filename with no directory path information if the
318 --                  directory parameter is specified. Filename can contain
319 --                  substitution characters to use this name as a template to
320 --                  create multiple files.
321 --      directory - Name of the directory object within the database that is
322 --                  used to locate the filename. Users with
323 --                  IMP_FULL_DATABASE or EXP_FULL_DATABASE roles can
324 --                  specify the directory path in the filename, but other
325 --                  users must specify this parameter.
326 --      filesize  - Size of the file being added. It may be specified as number
327 --                  of bytes, number of kilobytes (followed by 'K'), number of
328 --                  megabytes (followed by 'M'), number of gigabytes
329 --                  (followed by 'G') or the number of terabytes (followed
330 --                  by 'T'). This parameter is ignored for import and
331 --                  sql_file operations. On export operations, no more than
332 --                  the specified number of bytes will be written to the file,
333 --                  and if there is insufficient space on the device, the
334 --                  operation will fail. If not specified on export, the
335 --                  default will be unlimited size with allocations in 50 Mbyte
336 --                  increments. The minimum allowed filesize is 10 times the
337 --                  default block size for the file system.
338 --      filetype  - Type of file being added to the job. This numeric constant
339 --                  indicates whether it is a dump file, log file, bad file,
340 --                  or sql file being added to the job.
341 --     reusefile  - Flag indicating whether or not an existing dumpfile
342 --                  should be reused (i.e., overwritten) during an export
343 --                  operation. Valid values are:
344 --                    NULL - use the default behavior for the file type:
345 --                           for dump files, the default is do not reuse;
346 --                           for log and sql files, the default is reuse.
347 --                       0 - do not reuse (only meaningful for dump files).
348 --                       1 - reuse (only meaningful for dump files).
349 --
350 -- RETURNS:
351 --      None
352 -- EXCEPTIONS:
353 --      INVALID_HANDLE    - The current session is not attached to this handle
354 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
355 --                          parameter.
356 --      INVALID_STATE     - The current job state does not allow for the
357 --                          addition of files to the job (only for SQL and
358 --                          LOG files)
359 --      INVALID_OPERATION - The file was specified for a Network or Estimate
360 --                          operation, or the file was specified for an
361 --                          executing import or sql_file operation.
362 --      SUCCESS_WITH_INFO - API succeeded but further information available
363 --                          through the get_status API
364 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
365 --                          exists
366 
367   PROCEDURE add_file (
368                 handle          IN  NUMBER,
369                 filename        IN  VARCHAR2,
370                 directory       IN  VARCHAR2 DEFAULT NULL,
371                 filesize        IN  VARCHAR2 DEFAULT NULL,
372                 filetype        IN  NUMBER DEFAULT KU$_FILE_TYPE_DUMP_FILE,
373                 reusefile       IN  NUMBER DEFAULT NULL
374         );
375 
376 
377 -- ATTACH: Acquire access to an active or stopped job
378 --
379 -- PARAMETERS:
380 --      job_name  - Identifies the particular job or operation. It will default
381 --                  to the name of a job owned by the user specified by
382 --                  job_owner if that user has only one job in the defining,
383 --                  executing, idling, waiting, or completing states.
384 --      job_owner - The user that started the job. If NULL, it defaults to the
385 --                  owner of the current session. To specify a different
386 --                  job_owner (than themselves), users must have
387 --                  IMP_FULL_DATABASE or EXP_FULL_DATABASE roles
388 --                  (depending on the operation)
389 --
390 -- RETURNS:
391 --      A handle to be used in subsequent calls to all other DBMS_DATAPUMP
392 --      operations.
393 -- EXCEPTIONS:
394 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
395 --                          parameter.
396 --      INVALID_OPERATION - This operation can not be restarted
397 --      SUCCESS_WITH_INFO - API succeeded but further information available
398 --                          through the get_status API
399 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
400 --                          exists
401 
402   FUNCTION attach (
403                 job_name        IN  VARCHAR2 DEFAULT NULL,
404                 job_owner       IN  VARCHAR2 DEFAULT NULL
405         )
406         RETURN NUMBER;
407 
408 
409 -- DATA_FILTER: Filter data using subqueries or excluding all user data
410 --
411 -- PARAMETERS:
412 --      handle      - Identifies the particular job or operation (from OPEN)
413 --      name        - The filter name or type to use (see documentation)
414 --      value       - Filter details
415 --      table_name  - Table name for applying the filter. Will default to
416 --                    all tables if not specified.
417 --      schema_name - Name of the schema owning the table to apply the data
418 --                    filter. Null implies all schemas in the job.
419 --
420 -- RETURNS:
421 --      NONE
422 -- EXCEPTIONS:
423 --      INVALID_HANDLE    - The current session is not attached to this handle
424 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
425 --                          parameter.
426 --      PRIVILEGE_ERROR   - The specified operation requires privileges
427 --      INVALID_STATE     - The job is not in the defining state
428 --      INCONSISTENT_ARGS - The datatype of value does not match the filter
429 --                          name
430 --      SUCCESS_WITH_INFO - API succeeded but further information available
431 --                          through the get_status API
432 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
433 --                          exists
434 
435   PROCEDURE data_filter (
436                 handle          IN  NUMBER,
437                 name            IN  VARCHAR2,
438                 value           IN  NUMBER,
439                 table_name      IN  VARCHAR2 DEFAULT NULL,
440                 schema_name     IN  VARCHAR2 DEFAULT NULL
441         );
442 
443   PROCEDURE data_filter (
444                 handle          IN  NUMBER,
445                 name            IN  VARCHAR2,
446                 value           IN  CLOB,
447                 table_name      IN  VARCHAR2 DEFAULT NULL,
448                 schema_name     IN  VARCHAR2 DEFAULT NULL
449         );
450 
451   PROCEDURE data_filter (
452                 handle          IN  NUMBER,
453                 name            IN  VARCHAR2,
454                 value           IN  VARCHAR2,
455                 table_name      IN  VARCHAR2 DEFAULT NULL,
456                 schema_name     IN  VARCHAR2 DEFAULT NULL
457         );
458 
459 
460 -- DATA_REMAP: Modify the values of data in user tables
461 --
462 -- PARAMETERS:
463 --      handle      - Identifies the particular job or operation (from OPEN)
464 --      name        - The type of data remapping to be performed
465 --      table_name  - Table name for applying the remap.
466 --      column      - Column name for where the data needs to be remapped
467 --      function    - Function used to remap the column data.
468 --      schema      - Name of the schema owning the table to apply the data
469 --                    remap. Null implies all schemas in the job.
470 --      flags       - flags to modify data remap.
471 --                    current flags are: with_rowid - send in rowid to function
472 --
473 -- RETURNS:
474 --      NONE
475 -- EXCEPTIONS:
476 --      INVALID_HANDLE    - The current session is not attached to this handle
477 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
478 --                          parameter.
479 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
480 --                          exists
481 
482   PROCEDURE data_remap (
483                 handle          IN  NUMBER,
484                 name            IN  VARCHAR2,
485                 table_name      IN  VARCHAR2,
486                 column          IN  VARCHAR2,
487                 function        IN  VARCHAR2,
488                 schema          IN  VARCHAR2 DEFAULT NULL,
489                 remap_flags     IN  NUMBER DEFAULT 0
490         );
491 
492 
493 -- DETACH: Detach current session (and handle) from job
494 --
495 -- PARAMETERS:
496 --      handle  - Identifies the particular job or operation (from OPEN/ATTACH)
497 --
498 -- RETURNS:
499 --      NONE
500 -- EXCEPTIONS:
501 --      INVALID_HANDLE    - The current session is not attached to this handle
502 --      SUCCESS_WITH_INFO - API succeeded but further information available
503 --                          through the get_status API
504 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
505 --                          exists
506 
507   PROCEDURE detach (
508                 handle          IN  NUMBER
509         );
510 
511 
512 -- GET_DUMPFILE_INFO: Analyze specified file and return dumpfile information
513 --
514 -- PARAMETERS:
515 --      filename   - Name of the file being added. This must be a simple
516 --                   filename with no directory path information.
517 --      directory  - Name of the directory object within the database that is
518 --                   used to locate the filename.
519 --      info_table - (OUT) The ku$_dumpfile_info table to be
520 --                   populated with dumpfile header information.
521 --      filetype   - (OUT) 0 => Unknown file type
522 --                         1 => Data Pump dumpfile
523 --                         2 => Classic Export dumpfile
524 -- RETURNS:
525 --      NONE
526 --
527 -- EXCEPTIONS:
528 --      Fatal exceptions raised - all others eaten
529 
530   PROCEDURE get_dumpfile_info (
531                 filename        IN     VARCHAR2,
532                 directory       IN     VARCHAR2,
533                 info_table      OUT    ku$_dumpfile_info,
534                 filetype        OUT    NUMBER
535         );
536 
537 
538 -- GET_STATUS: Get status of job (for monitoring and control)
539 --
540 -- PARAMETERS:
541 --      handle      - Identifies the particular job (from OPEN/ATTACH)
542 --      mask        - Bit mask to specify the information to be returned:
543 --                      Bit 0  - Retrieve work in progress (wip) information
544 --                      Bit 1  - Retrieve job complete description information
545 --                      Bit 2  - Retrieve detailed job and per-worker progress
546 --                               and status (NOTE: Retrieving the job status
547 --                               and checking the job state for 'COMPLETED' is
548 --                               the proper mechanism for detecting that a job
549 --                               has completed successfully. Once the job has
550 --                               entered this state, subsequent calls will
551 --                               likely result in an invalid_handle exception)
552 --                      Bit 3  - Retrieve error packet/log entry information
553 --      timeout    - Max seconds to wait if no pending status queue entries.
554 --                   Specifying zero or null results in an immediate return
555 --                   and -1 will wait indefinitely. The timeout will be
556 --                   ignored when the job is in the 'COMPLETING' or
557 --                   'COMPLETED' states.
558 --      job_state  - (OUT) Current job state of this Data Pump job (newer
559 --                   procedural interface only)
560 --      status     - (OUT) ku$_Status object with requested information in the
561 --                   newer procedural interface only
562 --
563 -- RETURNS:
564 --      ku$_Status object with requested information in the initial functional
565 --      interface only (to be deprecated)
566 --
567 -- EXCEPTIONS:
568 --      INVALID_HANDLE    - The current session is not attached to this handle
569 --      SUCCESS_WITH_INFO - API succeeded but further information available
570 --                          through the get_status API
571 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
572 --                          exists
573 
574   FUNCTION get_status (
575                 handle          IN  NUMBER,
576                 mask            IN  INTEGER,
577                 timeout         IN  NUMBER DEFAULT NULL
578         )
579         RETURN ku$_Status;
580 
581   PROCEDURE get_status (
582                 handle          IN  NUMBER,
583                 mask            IN  INTEGER,
584                 timeout         IN  NUMBER DEFAULT NULL,
585                 job_state       OUT VARCHAR2,
586                 status          OUT ku$_Status1010
587         );
588 
589   PROCEDURE get_status (
590                 handle          IN  NUMBER,
591                 mask            IN  INTEGER,
592                 timeout         IN  NUMBER DEFAULT NULL,
593                 job_state       OUT VARCHAR2,
594                 status          OUT ku$_Status1020
595         );
596 
597   PROCEDURE get_status (
598                 handle          IN  NUMBER,
599                 mask            IN  INTEGER,
600                 timeout         IN  NUMBER DEFAULT NULL,
601                 job_state       OUT VARCHAR2,
602                 status          OUT ku$_Status1120
603         );
604 
605   PROCEDURE get_status (
606                 handle          IN  NUMBER,
607                 mask            IN  INTEGER,
608                 timeout         IN  NUMBER DEFAULT NULL,
609                 job_state       OUT VARCHAR2,
610                 status          OUT ku$_Status1210
611         );
612 
613 -- LOG_ENTRY: Add entry to log file and broadcast to all get_status callers
614 --
615 -- PARAMETERS:
616 --      handle        - Identifies the particular job (from OPEN/ATTACH)
617 --      message       - Text to be added to the log file
618 --      log_file_only - Specified text to be written to the log file only and
619 --                      not to the status queues like other log messages
620 --
621 -- RETURNS:
622 --      NONE
623 --
624 -- EXCEPTIONS:
625 --      INVALID_HANDLE    - The current session is not attached to this handle
626 --      SUCCESS_WITH_INFO - API succeeded but further information available
627 --                          through the get_status API
628 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
629 --                          exists
630 
631   PROCEDURE log_entry (
632                 handle          IN  NUMBER,
633                 message         IN  VARCHAR2,
634                 log_file_only   IN  NUMBER DEFAULT 0
635         );
636 
637 
638 -- METADATA_FILTER: Applies transformations to objects' DDL during import,
639 --                  network, and sql_file operations
640 --
641 -- PARAMETERS:
642 --      handle      - Identifies the particular job or operation (from OPEN)
643 --      name        - Name of the metadata filter (see documentation).
644 --      value       - Text expression for the filter in the name parameter
645 --      object_path - The object path to which the filter applies (default=all
646 --                    objects).
647 --      object_type - For backward compatibility, can be used to specify the
648 --                    object path (see above)
649 --
650 -- RETURNS:
651 --      NONE
652 -- EXCEPTIONS:
653 --      INVALID_HANDLE    - The current session is not attached to this handle
654 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
655 --                          parameter.
656 --      INVALID_STATE     - The job is not in the defining state
657 --      INCONSISTENT_ARGS - The value specification does not match the metadata
658 --                          filter name
659 --      SUCCESS_WITH_INFO - API succeeded but further information available
660 --                          through the get_status API
661 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
662 --                          exists
663 
  -- VARCHAR2 overload: filter value supplied as a text expression.
  PROCEDURE metadata_filter (
                handle          IN  NUMBER,
                name            IN  VARCHAR2,
                value           IN  VARCHAR2,
                object_path     IN  VARCHAR2 DEFAULT NULL,
                object_type     IN  VARCHAR2 DEFAULT NULL  -- legacy alias for object_path
        );
671 
  -- CLOB overload: for filter values too large for a VARCHAR2 argument.
  PROCEDURE metadata_filter (
                handle          IN  NUMBER,
                name            IN  VARCHAR2,
                value           IN  CLOB,
                object_path     IN  VARCHAR2 DEFAULT NULL,
                object_type     IN  VARCHAR2 DEFAULT NULL  -- legacy alias for object_path
        );
679 
680 -- METADATA_TRANSFORM: Allows transformations applied to objects during jobs
681 --
682 -- PARAMETERS:
683 --      handle      - Identifies the particular job or operation (from OPEN)
684 --      name        - Name of the transformation (see documentation).
685 --      value       - The value of the parameter for the transform
686 --      object_type - The object type to which the transform applies
687 --                    (default=all objects)
688 --
689 -- RETURNS:
690 --      NONE
691 -- EXCEPTIONS:
692 --      INVALID_HANDLE    - The current session is not attached to this handle
693 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
694 --                          parameter.
695 --      INVALID_STATE     - The job is not in the defining state or operation
696 --                          is either export or estimate which don't support
697 --                          transforms.
698 --      INVALID_OPERATION - Transforms not permitted for this operation
699 --      INCONSISTENT_ARGS - The value specification does not match the metadata
700 --                          transform name
701 --      SUCCESS_WITH_INFO - API succeeded but further information available
702 --                          through the get_status API
703 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
704 --                          exists
705 
  -- VARCHAR2 overload: transform parameter value supplied as text.
  PROCEDURE metadata_transform (
                handle          IN  NUMBER,
                name            IN  VARCHAR2,
                value           IN  VARCHAR2,
                object_type     IN  VARCHAR2 DEFAULT NULL
        );
712 
  -- NUMBER overload: for transforms taking a numeric (or boolean 0/1) value.
  PROCEDURE metadata_transform (
                handle          IN  NUMBER,
                name            IN  VARCHAR2,
                value           IN  NUMBER,
                object_type     IN  VARCHAR2 DEFAULT NULL
        );
719 
720 
721 -- METADATA_REMAP: Allows remappings applied to objects during jobs
722 --
723 -- PARAMETERS:
724 --      handle      - Identifies the particular job or operation (from OPEN)
725 --      name        - Name of the mapping to occur (see documentation).
726 --      old_value   - Previous value to reset to new value (value parameter)
727 --      value       - The value of the parameter for the mapping
728 --      object_type - The object type to which the mapping applies (default=all
729 --                    objects)
730 --
731 -- RETURNS:
732 --      NONE
733 -- EXCEPTIONS:
734 --      INVALID_HANDLE    - The current session is not attached to this handle
735 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
736 --                          parameter.
737 --      INVALID_STATE     - The job is not in the defining state or operation
738 --                          is either export or estimate which don't support
739 --                          transforms.
740 --      INVALID_OPERATION - Remaps not permitted for this operation
741 --      INCONSISTENT_ARGS - The value specification does not match the metadata
742 --                          transform name
743 --      SUCCESS_WITH_INFO - API succeeded but further information available
744 --                          through the get_status API
--      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
--                          exists

  PROCEDURE metadata_remap (
                handle          IN  NUMBER,
                name            IN  VARCHAR2,
                old_value       IN  VARCHAR2,   -- value to be replaced
                value           IN  VARCHAR2,   -- replacement value
                object_type     IN  VARCHAR2 DEFAULT NULL
        );
755 
756 
757 
758 -- OPEN: Create a new DataPump job/operation
759 --
760 -- PARAMETERS:
761 --      operation       - The type of operation to be performed (EXPORT,
762 --                        IMPORT, SQL_FILE)
763 --      job_mode        - Operation mode (FULL, SCHEMA, TABLE, TABLESPACE,
764 --                        TRANSPORTABLE)
765 --      remote_link     - Link to source database to be used for network
766 --                        operations.
767 --      job_name        - Name of the job, implicitly qualified by the schema
768 --                        and must be unique to that schema
769 --      version         - Version of the database objects to be extracted (for
770 --                        export, estimate, and network only). Possible values
771 --                        are COMPATIBLE (the default), LATEST, or a specific
772 --                        database version.
773 --      compression     - Compression to use job-wide on export
774 --
775 -- RETURNS:
776 --      A handle to be used in all subsequent calls (except ATTACH)
777 --
778 -- EXCEPTIONS:
779 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
780 --                          parameter.
781 --      JOB_EXISTS        - A table having the name job_name already exists.
782 --      PRIVILEGE_ERROR   - User doesn't have the privilege to create the
783 --                          specified master table
784 --      INTERNAL_ERROR    - There was an internal error trying to create the
785 --                          DataPump job as specified
786 --      SUCCESS_WITH_INFO - API succeeded but further information available
787 --                          through the get_status API
788 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
789 --                          exists
790 
  -- Returns the job handle used by all subsequent calls except ATTACH.
  FUNCTION open (
                operation       IN  VARCHAR2,
                job_mode        IN  VARCHAR2,
                remote_link     IN  VARCHAR2 DEFAULT NULL,  -- non-NULL => network operation
                job_name        IN  VARCHAR2 DEFAULT NULL,  -- NULL => system-generated name
                version         IN  VARCHAR2 DEFAULT 'COMPATIBLE',
                compression     IN  NUMBER DEFAULT KU$_COMPRESS_METADATA
        )
        RETURN NUMBER;
800 
801 
802 -- SET_PARALLEL: Throttle the degree of parallelism within a job
803 --
804 -- PARAMETERS:
805 --      handle  - Identifies the particular job or operation (from OPEN/ATTACH)
806 --      degree  - Max number of worker processes that can be used for the job
807 --
808 -- RETURNS:
809 --      NONE
810 --
811 -- EXCEPTIONS:
812 --      INVALID_HANDLE    - The current session is not attached to this handle
813 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
814 --                          parameter.
815 --      INVALID_OPERATION - Changing the degree of parallelism is not permitted
816 --                          for this operation
817 --      SUCCESS_WITH_INFO - API succeeded but further information available
818 --                          through the get_status API
819 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
820 --                          exists
821 
  PROCEDURE set_parallel (
                handle          IN  NUMBER,
                degree          IN  NUMBER  -- max worker processes for the job
        );
826 
827 
828 -- SET_PARAMETER: Specify a variety of processing options for a particular job
829 --
830 -- PARAMETERS:
831 --      handle  - Identifies the particular job or operation (from OPEN/ATTACH)
832 --      name    - Parameter name (see documentation).
833 --      value   - Value for this parameter
834 --
835 -- RETURNS:
836 --      NONE
837 --
838 -- EXCEPTIONS:
839 --      INVALID_HANDLE    - The current session is not attached to this handle
840 --      INVALID_ARGVAL    - A NULL or invalid value was supplied for an input
841 --                          parameter.
842 --      INVALID_STATE     - Job must be in the defining state
843 --      INCONSISTENT_ARGS - Datatype of value inconsistent with parameter type
844 --      INVALID_OPERATION - The specified parameter is not allowed for the
845 --                          current operation
846 --      PRIVILEGE_ERROR   - The specified parameter requires privileges
847 --      SUCCESS_WITH_INFO - API succeeded but further information available
848 --                          through the get_status API
849 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
850 --                          exists
851 
  -- VARCHAR2 overload: for text-valued parameters.
  PROCEDURE set_parameter (
                handle          IN  NUMBER,
                name            IN  VARCHAR2,
                value           IN  VARCHAR2
        );
857 
  -- NUMBER overload: for numeric-valued parameters.
  PROCEDURE set_parameter (
                handle          IN  NUMBER,
                name            IN  VARCHAR2,
                value           IN  NUMBER
        );
863 
864 
865 -- START_JOB: Start or restart a job
866 --
867 -- PARAMETERS:
868 --      handle       - Identifies the particular job or operation (from
869 --                     OPEN/ATTACH)
870 --      skip_current - If set (on a restart only - ignored on initial start),
871 --                     will cause incomplete actions from a previous start to
872 --                     be skipped. Default is false.
--      abort_step   - For testing only
--      cluster_ok   - If =0, all workers are started in the current instance.
--                     Otherwise, workers are started on instances usable by
--                     the job.
--      service_name - If specified, indicates a service name used to constrain
--                     the job to specific instances or to a specific resource
--                     group.
880 --
881 -- RETURNS:
882 --      NONE
883 --
884 -- EXCEPTIONS:
885 --      INVALID_HANDLE    - The current session is not attached to this handle
886 --      INVALID_STATE     - Job can't be started due to insufficient info
887 --      INVALID_OPERATION - The operation as defined has insufficient or
888 --                          conflicting attributes and can not be started
889 --      SUCCESS_WITH_INFO - API succeeded but further information available
890 --                          through the get_status API
891 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
892 --                          exists
893 
  PROCEDURE start_job (
                handle          IN  NUMBER,
                skip_current    IN  NUMBER DEFAULT 0,     -- restart only; skip incomplete actions
                abort_step      IN  NUMBER DEFAULT 0,     -- testing only
                cluster_ok      IN  NUMBER DEFAULT 1,     -- 0 => workers on current instance only
                service_name    IN  VARCHAR2 DEFAULT NULL -- constrain workers to a service
        );
901 
902 
903 -- STOP_JOB: Terminate a job while preserving its state
904 --
905 -- PARAMETERS:
906 --      handle      - Identifies the particular job or operation (from
907 --                    OPEN/ATTACH). This handle is detached on successful
908 --                    completion of this call.
909 --      immediate   - If true, worker processes are aborted immediately instead
910 --                    of being allowed to complete their current work items.
911 --                    This halts the job more rapidly at the expense of having
912 --                    to rerun parts of the job on a restart.
913 --      keep_master - If non-zero, the master table is retained when the job is
914 --                    stopped. If zero, the master table is dropped when the
915 --                    job is stopped. If null, retention is based on the
916 --                    KEEP_MASTER parameter setting.
917 --      delay       - Number of seconds that should be waited until other
918 --                    attached sessions are forcibly detached. The delay
919 --                    allows other sessions attached to the job to be notified
920 --                    that a stop has been performed. If delay=0 is specified,
921 --                    other attached sessions will find their handles are
922 --                    invalid at their next calls to the datapump API.
923 --
924 -- RETURNS:
925 --      NONE
926 --
927 -- EXCEPTIONS:
928 --      INVALID_HANDLE    - The current session is not attached to this handle
929 --      INVALID_OPERATION - This operation can not be stopped
930 --      SUCCESS_WITH_INFO - API succeeded but further information available
931 --                          through the get_status API
932 --      NO_SUCH_JOB       - The job handle is no longer valid or job no longer
933 --                          exists
934 
  -- Detaches the handle on successful completion.
  PROCEDURE stop_job (
                handle          IN  NUMBER,
                immediate       IN  NUMBER DEFAULT 0,     -- non-zero: abort workers immediately
                keep_master     IN  NUMBER DEFAULT NULL,  -- NULL: defer to KEEP_MASTER parameter
                delay           IN  NUMBER DEFAULT 60     -- seconds before forcing other detaches
        );
941 
942 -- WAIT_FOR_JOB: Wait for job to complete and then return
943 --
944 -- PARAMETERS:
945 --      handle      - Identifies the particular job or operation (from
946 --                    OPEN/ATTACH). This handle is detached on successful
947 --                    completion of this call.
948 --      job_state   - (OUT) The job state at job completion
949 -- RETURNS:
950 --      NONE
951 --
952 -- EXCEPTIONS:
953 --      Fatal exceptions raised - all others eaten
954 
  -- Blocks until the job completes, then detaches the handle.
  -- Only fatal exceptions are raised; all others are eaten (see header above).
  PROCEDURE wait_for_job (
                handle          IN  NUMBER,
                job_state       OUT VARCHAR2  -- job state at completion
        );
959 
960 
961 ---------------------------------------------------------------
962 -- The following routines are for internal Oracle consumers
963 -- such as consumers of the Data Pump extensibility framework.
964 ---------------------------------------------------------------
965 
966 
-- DATAPUMP_JOB: Is it or is it not?
-- NOTE(review): presumably reports whether the current session is executing
-- as part of a Data Pump job -- confirm against package body.

  FUNCTION datapump_job RETURN BOOLEAN;
970 
-- Privs - Yes or no?
-- NOTE(review): presumably checks whether the caller holds the privileges
-- required for the given operation (cf. the EXP_FULL_DATABASE /
-- IMP_FULL_DATABASE notes in the package header) -- confirm against body.

  FUNCTION has_privs(
                oper  IN VARCHAR2)
    RETURN BOOLEAN;
976 
-- Establish remote Data Pump job context
-- (internal: used by the extensibility framework for network operations)

  PROCEDURE establish_remote_context(
                worker_id    IN NUMBER,
                remote_link  IN VARCHAR2);  -- database link to the remote source

  -- Select which remote worker subsequent calls apply to.
  PROCEDURE set_remote_worker(
                worker_id    IN NUMBER);
985 
-- Set up remote Data Pump job context
-- (internal; status_xml is chunked -- `more` presumably flags that additional
--  XML fragments follow; confirm against package body)

  PROCEDURE setup_remote_context(
                user_name       IN VARCHAR2,
                job_name        IN VARCHAR2,
                version         IN NUMBER,
                status_xml      IN VARCHAR2,
                status_xml_len  IN NUMBER,
                more            IN NUMBER);
995 
-- To determine ku$_Status object version to use for network operations.
-- Maps a database version number to the matching ku$_Status object version.

  FUNCTION get_status_version(
                version  IN NUMBER)
    RETURN NUMBER;
1001 
-- Test remote Data Pump job context
-- (one probe per supported remote release: 10.1, 10.2, 11.2, 12.1)

  PROCEDURE test_remote_context1010;
  PROCEDURE test_remote_context1020;
  PROCEDURE test_remote_context1120;
  PROCEDURE test_remote_context1210;
1008 
1009 -- LOG_ERROR: Add error to log file and broadcast to all get_status callers
1010 
1014                 error_number    IN  NUMBER DEFAULT 0,
1011   PROCEDURE log_error (
1012                 handle          IN  NUMBER,
1013                 message         IN  VARCHAR2,
1015                 fatal_error     IN  NUMBER DEFAULT 0,
1016                 log_file_only   IN  NUMBER DEFAULT 0
1017         );
1018 
-- Create view into master table for a job

  -- Overload identifying the job by owning schema and name.
  PROCEDURE create_job_view (
                job_schema      IN VARCHAR2,
                job_name        IN VARCHAR2,
                view_name       IN VARCHAR2,
                view_type       IN NUMBER DEFAULT KU$_JOB_VIEW_ALL
        );
1027 
  -- Overload identifying the job by an attached handle.
  PROCEDURE create_job_view (
                handle          IN NUMBER,
                view_name       IN VARCHAR2,
                view_type       IN NUMBER DEFAULT KU$_JOB_VIEW_ALL
        );
1033 
-- SET_DEBUG: Enable debug/trace features - pre 11.0
-- PARAMETERS:
--      on_off          - new switch state.
--      ip_addr         - IP address used to connect to JDeveloper

  PROCEDURE set_debug (
                on_off          IN NUMBER,
                ip_addr         IN VARCHAR2 DEFAULT NULL
        );
1043 
-- SET_DEBUG: Enable debug/trace features - 11.0 forward
-- PARAMETERS:
--      debug_flags:  Trace/debug flags from /TRACE param, or event, and
--                    possibly global trace/debug flags
--      version_flag: Any integer, no default

  PROCEDURE set_debug (
                debug_flags     IN BINARY_INTEGER,
                version_flag    IN BINARY_INTEGER
        );
1054 
-- TRACE_ENTRY: Write a trace msg. to the current process's trace file
--      if options debugging is turned on.

  PROCEDURE trace_entry (
                facility        IN VARCHAR2,  -- component tag for the trace line
                msg             IN VARCHAR2
        );
1062 
-- Temporary home for clob helper routines
-- NOTE(review): presumably appends `value` at `position` into a session-held
-- CLOB retrieved via client_lob_get; `as_is` semantics undocumented here --
-- confirm against package body.
 PROCEDURE client_lob_append (
                value           IN  VARCHAR2,
                position        IN  NUMBER,
                as_is           IN  NUMBER DEFAULT 0
       );
 -- Returns the CLOB built up by client_lob_append calls.
 FUNCTION client_lob_get RETURN CLOB;
 -- Discards the accumulated CLOB.
 PROCEDURE client_lob_delete;
1071 
 -- Diffie-Hellman key exchange for the job identified by handle.
 -- NOTE(review): parameter semantics (a, b in; c, d out) are intentionally
 -- undocumented in this spec -- see package body; likely key-agreement
 -- values for encrypted-job support. Confirm before relying on them.
 PROCEDURE dh_key_exchange(
                handle          IN  NUMBER,
                a               IN VARCHAR2,
                b               IN VARCHAR2,
                c              OUT VARCHAR2,
                d              OUT VARCHAR2
        );
1079 
1080 END DBMS_DATAPUMP;