@@ -215,6 +215,23 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest,
215
215
File metadataFile ,
216
216
Date oaiDateStamp ,
217
217
PrintWriter cleanupLog ) throws ImportException , IOException {
218
+
219
+ logger .fine ("importing " + metadataFormat + " saved in " + metadataFile .getAbsolutePath ());
220
+
221
+ //@todo check for an IOException here; throw ImportException instead, if caught
222
+ String metadataAsString = new String (Files .readAllBytes (metadataFile .toPath ()));
223
+ return doImportHarvestedDataset (dataverseRequest , harvestingClient , harvestIdentifier , metadataFormat , metadataAsString , oaiDateStamp , cleanupLog );
224
+ }
225
+
226
+ @ TransactionAttribute (TransactionAttributeType .REQUIRES_NEW )
227
+ public Dataset doImportHarvestedDataset (DataverseRequest dataverseRequest ,
228
+ HarvestingClient harvestingClient ,
229
+ String harvestIdentifier ,
230
+ String metadataFormat ,
231
+ String metadataString ,
232
+ Date oaiDateStamp ,
233
+ PrintWriter cleanupLog ) throws ImportException , IOException {
234
+
218
235
if (harvestingClient == null || harvestingClient .getDataverse () == null ) {
219
236
throw new ImportException ("importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient." );
220
237
}
@@ -234,32 +251,32 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest,
234
251
// Kraffmiller's export modules; replace the logic below with clean
235
252
// programmatic lookup of the import plugin needed.
236
253
254
+ logger .fine ("importing " + metadataFormat + " for " + harvestIdentifier );
255
+
237
256
if ("ddi" .equalsIgnoreCase (metadataFormat ) || "oai_ddi" .equals (metadataFormat )
238
257
|| metadataFormat .toLowerCase ().matches ("^oai_ddi.*" )) {
239
258
try {
240
- String xmlToParse = new String (Files .readAllBytes (metadataFile .toPath ()));
259
+ /// String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
241
260
// TODO:
242
261
// import type should be configurable - it should be possible to
243
262
// select whether you want to harvest with or without files,
244
263
// ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
245
- logger .fine ("importing DDI " +metadataFile .getAbsolutePath ());
246
- dsDTO = importDDIService .doImport (ImportType .HARVEST , xmlToParse );
247
- } catch (IOException | XMLStreamException | ImportException e ) {
264
+ /// logger.fine("importing DDI "+metadataFile.getAbsolutePath());
265
+ dsDTO = importDDIService .doImport (ImportType .HARVEST , metadataString );
266
+ } catch (XMLStreamException | ImportException e ) {
248
267
throw new ImportException ("Failed to process DDI XML record: " + e .getClass () + " (" + e .getMessage () + ")" );
249
268
}
250
269
} else if ("dc" .equalsIgnoreCase (metadataFormat ) || "oai_dc" .equals (metadataFormat )) {
251
- logger .fine ("importing DC " +metadataFile .getAbsolutePath ());
270
+ // logger.fine("importing DC "+metadataFile.getAbsolutePath());
252
271
try {
253
- String xmlToParse = new String (Files .readAllBytes (metadataFile .toPath ()));
254
- dsDTO = importGenericService .processOAIDCxml (xmlToParse , harvestIdentifier , harvestingClient .isUseOaiIdentifiersAsPids ());
255
- } catch (IOException | XMLStreamException e ) {
272
+ /// String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
273
+ dsDTO = importGenericService .processOAIDCxml (metadataString , harvestIdentifier , harvestingClient .isUseOaiIdentifiersAsPids ());
274
+ } catch (XMLStreamException e ) {
256
275
throw new ImportException ("Failed to process Dublin Core XML record: " + e .getClass () + " (" + e .getMessage () + ")" );
257
276
}
258
277
} else if ("dataverse_json" .equals (metadataFormat )) {
259
278
// This is Dataverse metadata already formatted in JSON.
260
- // Simply read it into a string, and pass to the final import further down:
261
- logger .fine ("Attempting to import custom dataverse metadata from file " +metadataFile .getAbsolutePath ());
262
- json = new String (Files .readAllBytes (metadataFile .toPath ()));
279
+ json = metadataString ;
263
280
} else {
264
281
throw new ImportException ("Unsupported import metadata format: " + metadataFormat );
265
282
}
@@ -394,17 +411,23 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest,
394
411
395
412
} catch (JsonParseException | ImportException | CommandException ex ) {
396
413
logger .fine ("Failed to import harvested dataset: " + ex .getClass () + ": " + ex .getMessage ());
397
- FileOutputStream savedJsonFileStream = new FileOutputStream (new File (metadataFile .getAbsolutePath () + ".json" ));
398
- byte [] jsonBytes = json .getBytes ();
399
- int i = 0 ;
400
- while (i < jsonBytes .length ) {
401
- int chunkSize = i + 8192 <= jsonBytes .length ? 8192 : jsonBytes .length - i ;
402
- savedJsonFileStream .write (jsonBytes , i , chunkSize );
403
- i += chunkSize ;
404
- savedJsonFileStream .flush ();
414
+
415
+ if (!"dataverse_json" .equals (metadataFormat ) && json != null ) {
416
+ // If this was an xml format that we were able to transform into
417
+ // our json, let's save it for debugging etc. purposes
418
+ File tempFile = File .createTempFile ("meta" , ".json" );
419
+ FileOutputStream savedJsonFileStream = new FileOutputStream (tempFile );
420
+ byte [] jsonBytes = json .getBytes ();
421
+ int i = 0 ;
422
+ while (i < jsonBytes .length ) {
423
+ int chunkSize = i + 8192 <= jsonBytes .length ? 8192 : jsonBytes .length - i ;
424
+ savedJsonFileStream .write (jsonBytes , i , chunkSize );
425
+ i += chunkSize ;
426
+ savedJsonFileStream .flush ();
427
+ }
428
+ savedJsonFileStream .close ();
429
+ logger .info ("JSON produced saved in " + tempFile .getAbsolutePath ());
405
430
}
406
- savedJsonFileStream .close ();
407
- logger .info ("JSON produced saved in " + metadataFile .getAbsolutePath () + ".json" );
408
431
throw new ImportException ("Failed to import harvested dataset: " + ex .getClass () + " (" + ex .getMessage () + ")" , ex );
409
432
}
410
433
return importedDataset ;
0 commit comments