54:
55: /*
56: The following steps list the basic activities involved in using the Data Pump API.
57: The steps are presented in the order in which the activities would generally be performed:
58: 1. Execute the DBMS_DATAPUMP.OPEN procedure to create a Data Pump job and its infrastructure.
59: 2. Define any parameters for the job.
60: 3. Start the job.
61: 4. Optionally, monitor the job until it completes.
62: 5. Optionally, detach from the job and reattach at a later time.
114: put_stream(g_log, 'DATAPUMP EXPORT JOB NAME : ' || l_job_name);
115:
116: put_stream(g_log, 'DATAPUMP EXPORT LOG FILE NAME : ' || l_log_file );
117:
118: -- 1. Execute the DBMS_DATAPUMP.OPEN procedure to create a Data Pump job and its infrastructure.
119: l_dp_handle := DBMS_DATAPUMP.open(
120: operation => 'EXPORT',
121: job_mode => 'TRANSPORTABLE',
122: remote_link => NULL,
115:
116: put_stream(g_log, 'DATAPUMP EXPORT LOG FILE NAME : ' || l_log_file );
117:
118: -- 1. Execute the DBMS_DATAPUMP.OPEN procedure to create a Data Pump job and its infrastructure.
119: l_dp_handle := DBMS_DATAPUMP.open(
120: operation => 'EXPORT',
121: job_mode => 'TRANSPORTABLE',
122: remote_link => NULL,
123: job_name => l_job_name,
126: put_stream(g_log,' STEP 1 done - Create an handle for DATAPUMP TRANSPORTABLE EXPORT JOB');
127:
128: -- 2. Define any parameters for the job.
129: -- 2.1 Add dump file parameter
130: DBMS_DATAPUMP.add_file(
131: handle => l_dp_handle,
132: filename => l_dmp_file,
133: directory => g_directory_name,
134: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);
130: DBMS_DATAPUMP.add_file(
131: handle => l_dp_handle,
132: filename => l_dmp_file,
133: directory => g_directory_name,
134: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);
135:
136: put_stream(g_log,' STEP 2.1 done - Set the dump file parameter');
137:
138: -- 2.2 Add log file parameter.
135:
136: put_stream(g_log,' STEP 2.1 done - Set the dump file parameter');
137:
138: -- 2.2 Add log file parameter.
139: DBMS_DATAPUMP.add_file(
140: handle => l_dp_handle,
141: filename => l_log_file,
142: directory => g_directory_name,
143: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);
139: DBMS_DATAPUMP.add_file(
140: handle => l_dp_handle,
141: filename => l_log_file,
142: directory => g_directory_name,
143: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);
144:
145: put_stream(g_log,' STEP 2.2 done - Set the log file parameter');
146:
147: -- 2.3 Add TTS_FULL_CHECK parameter
144:
145: put_stream(g_log,' STEP 2.2 done - Set the log file parameter');
146:
147: -- 2.3 Add TTS_FULL_CHECK parameter
148: DBMS_DATAPUMP.set_parameter(
149: handle => l_dp_handle,
150: name=>'TTS_FULL_CHECK',
151: value=>1);
152:
151: value=>1);
152:
153: put_stream(g_log,' STEP 2.3 done - Set TTS_FULL_CHECK parameter');
154:
155: DBMS_DATAPUMP.metadata_filter(
156: handle => l_dp_handle,
157: Name => 'TABLESPACE_EXPR',
158: value => 'IN (''' || l_table_space || ''')' );
159:
158: value => 'IN (''' || l_table_space || ''')' );
159:
160: put_stream(g_log,' STEP 2.4 - Set TABLESPACE_EXPR IN ('''|| l_table_space || ''')');
161: -- 3. Start the DATAPUMP job.
162: DBMS_DATAPUMP.start_job(l_dp_handle);
163:
164: put_stream(g_log, ' STEP 3 done - Start the EXPORT Job');
165:
166: -- 4. Optionally wait for the job to complete.
170: l_job_state := 'UNDEFINED';
171:
172: while (l_job_state <> 'COMPLETED') and (l_job_state <> 'STOPPED') loop
173:
174: DBMS_DATAPUMP.GET_STATUS(
175: l_dp_handle,
176: dbms_datapump.ku$_status_job_error +
177: dbms_datapump.ku$_status_job_status +
178: dbms_datapump.ku$_status_wip,
172: while (l_job_state <> 'COMPLETED') and (l_job_state <> 'STOPPED') loop
173:
174: DBMS_DATAPUMP.GET_STATUS(
175: l_dp_handle,
176: dbms_datapump.ku$_status_job_error +
177: dbms_datapump.ku$_status_job_status +
178: dbms_datapump.ku$_status_wip,
179: -1,
180: l_job_state,
173:
174: DBMS_DATAPUMP.GET_STATUS(
175: l_dp_handle,
176: dbms_datapump.ku$_status_job_error +
177: dbms_datapump.ku$_status_job_status +
178: dbms_datapump.ku$_status_wip,
179: -1,
180: l_job_state,
181: l_status);
174: DBMS_DATAPUMP.GET_STATUS(
175: l_dp_handle,
176: dbms_datapump.ku$_status_job_error +
177: dbms_datapump.ku$_status_job_status +
178: dbms_datapump.ku$_status_wip,
179: -1,
180: l_job_state,
181: l_status);
182:
196: put_stream(g_log, ' STEP 4 done - wait for the job to complete.');
197:
198: put_stream(g_log,' Final job state = ' || l_job_state );
199:
200: DBMS_DATAPUMP.detach(l_dp_handle);
201:
202: put_stream(g_log, ' STEP 5 done - Export job is done');
203:
204: put_stream(g_log, 'End of Procedure DATAPUMP_EXPORT : Export job is completed successfully ' );
214: put_stream(g_log, 'Exception in Data Pump export job . Exception Details : ');
215:
216: put_stream(g_output, 'Exception in Data Pump export job . Exception Details : ');
217:
218: dbms_datapump.get_status(
219: l_dp_handle,
220: dbms_datapump.ku$_status_job_error,
221: 0,
222: l_job_state,
216: put_stream(g_output, 'Exception in Data Pump export job . Exception Details : ');
217:
218: dbms_datapump.get_status(
219: l_dp_handle,
220: dbms_datapump.ku$_status_job_error,
221: 0,
222: l_job_state,
223: l_status);
224:
221: 0,
222: l_job_state,
223: l_status);
224:
225: IF ( bitand(l_status.mask,dbms_datapump.ku$_status_job_error) <> 0 ) THEN
226:
227: l_lerror := l_status.error;
228:
229: IF l_lerror IS NOT NULL THEN
259: END LOOP; -- WHILE l_ind IS NOT NULL
260:
261: END IF; -- IF l_lerror IS NOT NULL
262:
263: END IF; -- IF ( bitand(l_status.mask,dbms_datapump.ku$_status_job_error) <> 0 )
264:
265: -- Optionally STOP the job in case of an exception.
266: DBMS_DATAPUMP.stop_job(
267: handle => l_dp_handle,
262:
263: END IF; -- IF ( bitand(l_status.mask,dbms_datapump.ku$_status_job_error) <> 0 )
264:
265: -- Optionally STOP the job in case of an exception.
266: DBMS_DATAPUMP.stop_job(
267: handle => l_dp_handle,
268: immediate => 1,
269: keep_master => 0);
270:
267: handle => l_dp_handle,
268: immediate => 1,
269: keep_master => 0);
270:
271: DBMS_DATAPUMP.detach(l_dp_handle);
272:
273: retcode := 1;
274:
275: errbuf := sqlerrm;
286:
287: /*
288: The following steps list the basic activities involved in using the Data Pump API.
289: The steps are presented in the order in which the activities would generally be performed:
290: 1. Execute the DBMS_DATAPUMP.OPEN procedure to create a Data Pump job and its infrastructure.
291: 2. Define any parameters for the job.
292: 3. Start the job.
293: 4. Optionally, monitor the job until it completes.
294: 5. Optionally, detach from the job and reattach at a later time.
347: put_stream(g_log, 'DATAPUMP JOB NAME : ' || l_job_name);
348:
349: put_stream(g_log, 'DATAPUMP LOG FILE NAME : ' || l_log_file );
350:
351: -- 1. Execute the DBMS_DATAPUMP.OPEN procedure to create a Data Pump job and its infrastructure.
352: l_dp_handle := DBMS_DATAPUMP.open(
353: operation => 'IMPORT',
354: job_mode => 'TRANSPORTABLE',
355: remote_link => NULL,
348:
349: put_stream(g_log, 'DATAPUMP LOG FILE NAME : ' || l_log_file );
350:
351: -- 1. Execute the DBMS_DATAPUMP.OPEN procedure to create a Data Pump job and its infrastructure.
352: l_dp_handle := DBMS_DATAPUMP.open(
353: operation => 'IMPORT',
354: job_mode => 'TRANSPORTABLE',
355: remote_link => NULL,
356: job_name => l_job_name,
359: put_stream(g_log,' STEP 1 done - Create an handle for DATAPUMP TRANSPORTABLE IMPORT JOB');
360:
361: -- 2. Define any parameters for the job.
362: -- 2.1 Add dump file parameter
363: DBMS_DATAPUMP.add_file(
364: handle => l_dp_handle,
365: filename => l_dmp_file,
366: directory => g_directory_name,
367: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);
363: DBMS_DATAPUMP.add_file(
364: handle => l_dp_handle,
365: filename => l_dmp_file,
366: directory => g_directory_name,
367: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_DUMP_FILE);
368:
369: put_stream(g_log,' STEP 2.1 done - Set the dump file parameter');
370:
371: -- 2.2 Add log file parameter.
368:
369: put_stream(g_log,' STEP 2.1 done - Set the dump file parameter');
370:
371: -- 2.2 Add log file parameter.
372: DBMS_DATAPUMP.add_file(
373: handle => l_dp_handle,
374: filename => l_log_file,
375: directory => g_directory_name,
376: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);
372: DBMS_DATAPUMP.add_file(
373: handle => l_dp_handle,
374: filename => l_log_file,
375: directory => g_directory_name,
376: filetype => DBMS_DATAPUMP.KU$_FILE_TYPE_LOG_FILE);
377:
378: put_stream(g_log,' STEP 2.2 done - Set the log file parameter');
379:
380: -- -- 2.3 Add TTS_FULL_CHECK parameter
377:
378: put_stream(g_log,' STEP 2.2 done - Set the log file parameter');
379:
380: -- -- 2.3 Add TTS_FULL_CHECK parameter
381: -- DBMS_DATAPUMP.set_parameter(
382: -- handle => l_dp_handle,
383: -- name=>'TTS_FULL_CHECK',
384: -- value=>1);
385: --
410: SELECT SUBSTR(l_data_file_str, 1, INSTR(l_data_file_str , ',')-1) INTO l_data_file FROM dual;
411:
412: SELECT SUBSTR(l_data_file_str, INSTR(l_data_file_str , ',')+1) INTO l_data_file_str FROM dual;
413:
414: DBMS_DATAPUMP.SET_PARAMETER(
415: handle => l_dp_handle,
416: name=>'TABLESPACE_DATAFILE',
417: value=> l_data_file_dir || l_data_file);
418:
421: i := i + 1 ;
422:
423: END LOOP;
424:
425: DBMS_DATAPUMP.SET_PARAMETER(
426: handle => l_dp_handle,
427: name=>'TABLESPACE_DATAFILE',
428: value=>l_data_file_dir || l_data_file_str);
429:
431:
432: END;
433:
434: -- 3. Start the DATAPUMP job.
435: DBMS_DATAPUMP.start_job(l_dp_handle);
436:
437: put_stream(g_log, ' STEP 3 done - Start the Job');
438:
439: -- 4. Optionally wait for the job to complete.
443: l_job_state := 'UNDEFINED';
444:
445: while (l_job_state <> 'COMPLETED') and (l_job_state <> 'STOPPED') loop
446:
447: DBMS_DATAPUMP.GET_STATUS(
448: l_dp_handle,
449: dbms_datapump.ku$_status_job_error +
450: dbms_datapump.ku$_status_job_status +
451: dbms_datapump.ku$_status_wip,
445: while (l_job_state <> 'COMPLETED') and (l_job_state <> 'STOPPED') loop
446:
447: DBMS_DATAPUMP.GET_STATUS(
448: l_dp_handle,
449: dbms_datapump.ku$_status_job_error +
450: dbms_datapump.ku$_status_job_status +
451: dbms_datapump.ku$_status_wip,
452: -1,
453: l_job_state,
446:
447: DBMS_DATAPUMP.GET_STATUS(
448: l_dp_handle,
449: dbms_datapump.ku$_status_job_error +
450: dbms_datapump.ku$_status_job_status +
451: dbms_datapump.ku$_status_wip,
452: -1,
453: l_job_state,
454: l_status);
447: DBMS_DATAPUMP.GET_STATUS(
448: l_dp_handle,
449: dbms_datapump.ku$_status_job_error +
450: dbms_datapump.ku$_status_job_status +
451: dbms_datapump.ku$_status_wip,
452: -1,
453: l_job_state,
454: l_status);
455:
465: put_stream(g_log, ' STEP 4 done - wait for the job to complete.');
466:
467: put_stream(g_log, 'Final job state = ' || l_job_state );
468:
469: DBMS_DATAPUMP.detach(l_dp_handle);
470:
471: put_stream(g_log, ' STEP 5 done - Export job is done');
472:
473: put_stream(g_log, 'End of Procedure DATAPUMP_IMPORT : Export job is completed successfully ' );
480: put_stream(g_log, 'Exception in Data Pump job . Exception Details : ');
481:
482: put_stream(g_output, 'Exception in Data Pump job . Exception Details : ');
483:
484: dbms_datapump.get_status(
485: l_dp_handle,
486: dbms_datapump.ku$_status_job_error,
487: 0,
488: l_job_state,
482: put_stream(g_output, 'Exception in Data Pump job . Exception Details : ');
483:
484: dbms_datapump.get_status(
485: l_dp_handle,
486: dbms_datapump.ku$_status_job_error,
487: 0,
488: l_job_state,
489: l_status);
490:
487: 0,
488: l_job_state,
489: l_status);
490:
491: IF ( bitand(l_status.mask,dbms_datapump.ku$_status_job_error) <> 0 ) THEN
492:
493: l_lerror := l_status.error;
494:
495: IF l_lerror IS NOT NULL THEN
525: END LOOP; -- WHILE l_ind IS NOT NULL
526:
527: END IF; -- IF l_lerror IS NOT NULL
528:
529: END IF; -- IF ( bitand(l_status.mask,dbms_datapump.ku$_status_job_error) <> 0 )
530:
531: -- Optionally STOP the job in case of an exception.
532: DBMS_DATAPUMP.stop_job(
533: handle => l_dp_handle,
528:
529: END IF; -- IF ( bitand(l_status.mask,dbms_datapump.ku$_status_job_error) <> 0 )
530:
531: -- Optionally STOP the job in case of an exception.
532: DBMS_DATAPUMP.stop_job(
533: handle => l_dp_handle,
534: immediate => 1,
535: keep_master => 0);
536:
533: handle => l_dp_handle,
534: immediate => 1,
535: keep_master => 0);
536:
537: DBMS_DATAPUMP.detach(l_dp_handle);
538:
539: retcode := 1;
540:
541: errbuf := sqlerrm;