Question Details

No question body available.

Tags

java spring postgresql kotlin hibernate

Answers (3)

Accepted Answer
November 5, 2025 Score: 2 Rep: 151 Quality: High Completeness: 80%

I found a solution: I should use the Large Object API for PostgreSQL instead of using bytea and Blob in my entities. Thanks to Andrey, who provided a very useful resource in the comments.

Here's working code that lets you get a stream from a database object. To avoid the "slow client" problem and connection pool exhaustion, the file is first copied from the database into a temporary file on disk, and a stream over that file is returned to the client. Once the response has been written, the temporary file is deleted.

// the service layer is deliberately omitted
@RestController  
class LargeFileController(  
    private val fileRepo: LargeFileContentRepo,  
    private val dataSource: DataSource,  
) {  

@PostMapping("large/file", consumes = [MediaType.MULTIPARTFORMDATAVALUE]) @Transactional //working with lage object requires active transaction fun saveFile(@RequestBody file: MultipartFile) { val conn = DataSourceUtils.getConnection(dataSource) try { val pgConn = conn.unwrap(org.postgresql.PGConnection::class.java) val lobj = pgConn.largeObjectAPI

val oid = lobj.createLO(LargeObjectManager.WRITE) val obj = lobj.open(oid, LargeObjectManager.WRITE)

file.inputStream.use { input -> input.copyTo(obj.outputStream) }

obj.close()

val entity = LargeFile( oid = oid, fileName = file.originalFilename, size = file.size, mimeType = file.contentType ) fileRepo.save(entity) } finally { DataSourceUtils.releaseConnection(conn, dataSource) } }

@GetMapping("large/file/{id}", produces = [MediaType.APPLICATION
OCTETSTREAMVALUE]) @Transactional fun getFileById(@PathVariable("id") id: Long, response: HttpServletResponse): ResponseEntity { val file = fileRepo.findById(id).orElseThrow() val tempFile = Files.createTempFile(UUID.randomUUID().toString(),file.fileName!!) return try { val conn: Connection = DataSourceUtils.getConnection(dataSource) try { val pgConn = conn.unwrap(org.postgresql.PGConnection::class.java) val lobj = pgConn.largeObjectAPI

val obj = lobj.open(file.oid, LargeObjectManager.READ) val inputStream = obj.inputStream

inputStream.use { lois -> tempFile.outputStream().use { fileOutputStream -> IOUtils.copy(lois, fileOutputStream) } } obj.close()

val contentDisposition = ContentDisposition.builder("attachment") .filename(file.fileName, StandardCharsets.UTF8) .build() ResponseEntity.ok() .header(HttpHeaders.CONTENTDISPOSITION, contentDisposition.toString()) .header(HttpHeaders.CONTENTLENGTH, tempFile.fileSize().toString()) .contentType(MediaType.APPLICATIONOCTETSTREAM) .body(InputStreamResource(tempFile.inputStream())) //or

/*tempFile.inputStream().use { input -> val contentDisposition = ContentDisposition.builder("attachment") .filename(file.fileName, StandardCharsets.UTF
8) .build() response.contentType = MediaType.MULTIPARTFORMDATAVALUE response.addHeader(HttpHeaders.CONTENTDISPOSITION, contentDisposition.toString()) response.outputStream.use { output -> IOUtils.copy(input, output) } }*/ } finally { DataSourceUtils.releaseConnection(conn, dataSource) } } finally { try { if (Files.exists(tempFile)) { Files.deleteIfExists(tempFile) } } catch (ex: Exception) { log.warn("Failed to delete temp file: ${tempFile.toAbsolutePath()}", ex) } } }

companion object { private val log = LoggerFactory.getLogger(LargeFileController::class.java) } }
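For completeness, here is a minimal sketch of the LargeFile entity and LargeFileContentRepo repository that the controller above assumes. They are not shown in the original answer, so the table name and field shapes are inferred from the controller code; the key point is that oid is a plain bigint column holding the large object's OID:

import jakarta.persistence.Entity
import jakarta.persistence.GeneratedValue
import jakarta.persistence.GenerationType
import jakarta.persistence.Id
import jakarta.persistence.Table
import org.springframework.data.jpa.repository.JpaRepository

// Hypothetical sketch, inferred from the controller above; adjust names to your schema.
@Entity
@Table(name = "large_file") // assumed table name
class LargeFile(
    // OID of the PostgreSQL large object; a plain bigint column, not bytea
    val oid: Long,
    val fileName: String?,
    val mimeType: String?,
    val size: Long,
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    val id: Long? = null,
)

interface LargeFileContentRepo : JpaRepository<LargeFile, Long>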

Full example here

UPDATE:

My mistake was that I was trying to store a Blob in a bytea column. All I had to do was change that column's type in the database from bytea to bigint. JPA then works correctly with Blobs (it stores the file's oid in the column).

Here is a complete correct example:


@Entity
@Table(name = "filecontent")
class FileContent(
    // The column type in the database must be bigint, not bytea.
    // This column stores the OID of the created large object.
    @Lob
    val oid: Blob,

    val name: String?,

    val mimeType: String?,

    val size: Long,

    @OneToMany(cascade = [CascadeType.ALL], mappedBy = "file")
    @BatchSize(size = 20)
    val tags: List<Tag>,

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    val id: Long? = null,
) {
    constructor(file: MultipartFile) : this(
        oid = BlobProxy.generateProxy(file.inputStream, file.size),
        name = file.originalFilename!!,
        mimeType = file.contentType!!,
        size = file.size,
        tags = listOf()
    )
}

@Service
class FileService(
    private val fileRepo: FileRepo
) {

    @Transactional
    fun getFile(id: Long): Pair<FileContent, File> {
        val fileInfo = fileRepo.findById(id)
            .orElseThrow { RuntimeException("File not found") }
            .content
        val largeObj = fileInfo.oid
        val tmpFile = Files.createTempFile(fileInfo.name, null)
        tmpFile.outputStream().use { out ->
            largeObj.binaryStream.use { it.copyTo(out) }
        }
        return fileInfo to tmpFile.toFile()
    }

    fun saveFile(file: MultipartFile): Long {
        return fileRepo.save(FileMetadata(file)).id!!
    }
}
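The FileMetadata wrapper and FileRepo used by the service are not shown in the answer. A hypothetical sketch, inferred only from the calls the service makes (findById(...).content and save(FileMetadata(file)).id):

import jakarta.persistence.CascadeType
import jakarta.persistence.Entity
import jakarta.persistence.GeneratedValue
import jakarta.persistence.GenerationType
import jakarta.persistence.Id
import jakarta.persistence.OneToOne
import org.springframework.data.jpa.repository.JpaRepository
import org.springframework.web.multipart.MultipartFile

// Hypothetical sketch; only the members the service actually touches are modeled.
@Entity
class FileMetadata(
    @OneToOne(cascade = [CascadeType.ALL])
    val content: FileContent,
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    val id: Long? = null,
) {
    // Mirrors FileContent's MultipartFile constructor so uploads can be saved directly.
    constructor(file: MultipartFile) : this(content = FileContent(file))
}

interface FileRepo : JpaRepository<FileMetadata, Long>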

@RestController
class BlobObjectController(
    val fileService: FileService
) {

    @GetMapping("/file/{id}", produces = [MediaType.APPLICATION_OCTET_STREAM_VALUE])
    fun getFileById(@PathVariable("id") id: Long): ResponseEntity<StreamingResponseBody> {
        val (metadata, tempFile) = fileService.getFile(id)
        val contentDisposition = ContentDisposition.builder("attachment")
            .filename(metadata.name, StandardCharsets.UTF_8)
            .build()
        val contentType = metadata.mimeType
            ?.let { runCatching { MediaType.valueOf(it) }.getOrNull() }
            ?: MediaType.APPLICATION_OCTET_STREAM

        val responseBody = StreamingResponseBody { outputStream ->
            tempFile.inputStream().use { input -> input.copyTo(outputStream) }

            val deleted = Files.deleteIfExists(tempFile.toPath())
            log.info("File ${if (deleted) "was deleted" else "WAS NOT DELETED"} after response sent: ${tempFile.name}")
        }
        return ResponseEntity.ok()
            .header(HttpHeaders.CONTENT_DISPOSITION, contentDisposition.toString())
            .contentLength(metadata.size)
            .contentType(contentType)
            .body(responseBody)
    }

    @PostMapping("/file", consumes = [MediaType.MULTIPART_FORM_DATA_VALUE])
    fun saveFile(@RequestBody file: MultipartFile) {
        fileService.saveFile(file)
    }

    companion object {
        private val log = LoggerFactory.getLogger(BlobObjectController::class.java)
    }
}
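To sanity-check the endpoints end to end, here is a small client-side sketch; the base URL and file id are assumptions, so adjust them to your deployment:

import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse
import java.nio.file.Path

// Downloads file 1 from the running application and writes it to disk.
fun main() {
    val client = HttpClient.newHttpClient()
    val request = HttpRequest.newBuilder(URI.create("http://localhost:8080/file/1")).GET().build()
    val response = client.send(request, HttpResponse.BodyHandlers.ofFile(Path.of("downloaded.bin")))
    println("HTTP ${response.statusCode()}, saved to ${response.body()}")
}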

November 2, 2025 Score: 1 Rep: 181 Quality: Low Completeness: 50%

I don't quite understand the reason for saving a large file in your database when you want to save it as a temp file first and then process it. Why take the overhead of reading the file from the database first? Just save it to the filesystem and read it from there. Furthermore, I think it is possible to read a large file from the DB in chunks; you may try this:

YourEntity entity = entityManager.find(YourEntity.class, id);
Blob blob = entity.yourBlobColumn();
try (InputStream in = blob.getBinaryStream()) {
    byte[] buffer = new byte[8192]; // 8 KB buffer
    int bytesRead;
    while ((bytesRead = in.read(buffer)) != -1) {
        // process or write chunk
    }
}
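For comparison with the Kotlin used in the accepted answer, the same chunked copy can be sketched like this (the stream arguments are placeholders for the blob's binary stream and whatever sink you write to):

import java.io.InputStream
import java.io.OutputStream

// Streams the blob through a fixed-size buffer, so the whole file is never held in memory.
fun copyInChunks(input: InputStream, output: OutputStream) {
    input.use { inp ->
        output.use { out ->
            inp.copyTo(out, bufferSize = 8192) // 8 KB buffer, matching the Java example
        }
    }
}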
November 6, 2025 Score: 1 Rep: 6,309 Quality: Low Completeness: 60%

Looks like you have over-engineered something. There is no need to use the PG API:

public class BlobTestIT {

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Test
    @Transactional
    void testBlob() throws Exception {

        byte[] bytes = new byte[1000];
        Arrays.fill(bytes, (byte) 'a');

        Path tempFile = Files.createTempFile("test_blob", ".bin");
        try (OutputStream os = Files.newOutputStream(tempFile)) {
            os.write(bytes);
        }

        Long blobId;
        try (InputStream is = Files.newInputStream(tempFile)) {
            blobId = jdbcTemplate.query("select ?", ps -> {
                ps.setBlob(1, BlobProxy.generateProxy(is, tempFile.toFile().length()));
            }, (rs, rowNum) -> rs.getLong(1)).get(0);

            assertThat(blobId).isNotNull();
        }

        Blob blob = jdbcTemplate.query("select ?", ps -> {
            ps.setLong(1, blobId);
        }, (rs, rowNum) -> rs.getBlob(1)).get(0);

        assertThat(blob).isNotNull();

        try (InputStream is = blob.getBinaryStream()) {
            byte[] read = IOUtils.toByteArray(is);
            assertThat(read).containsExactly(bytes);
        }
    }
}