Handling very large images (around 1GB) in C# requires special attention to memory management and performance. Here are several efficient approaches for cropping such an image into tiles:
Method 1: Use block processing (memory optimization version)
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;

/// <summary>
/// Crops a large source image into a 2x4 grid (8 tiles) of JPEG files
/// using System.Drawing.
/// </summary>
class Program
{
    static void Main()
    {
        string sourceImagePath = "1GB_Image.bmp";
        string outputFolder = "CroppedImages";

        if (!Directory.Exists(outputFolder))
        {
            Directory.CreateDirectory(outputFolder);
        }

        // Open the source ONCE and reuse it for every tile (re-opening a
        // 1GB bitmap per tile would decode it 8 extra times).
        // NOTE(review): System.Drawing still decodes the whole bitmap into
        // memory; a true 1GB source needs several GB of RAM. Prefer the
        // memory-mapped or Magick.NET approaches below for files this size.
        using (var source = new Bitmap(sourceImagePath))
        {
            int totalWidth = source.Width;
            int totalHeight = source.Height;

            // Tile size for a 2-column x 4-row grid.
            int chunkWidth = totalWidth / 2;
            int chunkHeight = totalHeight / 4;

            for (int row = 0; row < 4; row++)
            {
                for (int col = 0; col < 2; col++)
                {
                    int x = col * chunkWidth;
                    int y = row * chunkHeight;

                    // The last column/row absorbs the remainder of the
                    // integer division so no pixels are lost.
                    int width = (col == 1) ? totalWidth - x : chunkWidth;
                    int height = (row == 3) ? totalHeight - y : chunkHeight;

                    CropImage(
                        source,
                        Path.Combine(outputFolder, $"part_{row}_{col}.jpg"),
                        x, y, width, height);
                }
            }
        }
    }

    /// <summary>
    /// Copies the region (x, y, width, height) of <paramref name="source"/>
    /// into a new bitmap and saves it as JPEG at <paramref name="destPath"/>.
    /// </summary>
    static void CropImage(Image source, string destPath,
                          int x, int y, int width, int height)
    {
        using (var dest = new Bitmap(width, height))
        using (var graphics = Graphics.FromImage(dest))
        {
            graphics.DrawImage(
                source,
                new Rectangle(0, 0, width, height),  // destination rectangle
                new Rectangle(x, y, width, height),  // source region to copy
                GraphicsUnit.Pixel);

            // Save as JPEG to keep the output files small.
            dest.Save(destPath, ImageFormat.Jpeg);
            Console.WriteLine($"Saved: {destPath} ({width}x{height})");
        }
    }
}
Method 2: Use ImageSharp (modern cross-platform solution)
First install the NuGet package:
Install-Package SixLabors.ImageSharp
Implementation code:
using System;
using System.IO;
using System.Threading.Tasks;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats.Jpeg;
using SixLabors.ImageSharp.Processing;

/// <summary>
/// Crops a large image into a 2x4 grid (8 tiles) of JPEG files using
/// ImageSharp — a cross-platform library with no System.Drawing dependency.
/// </summary>
class Program
{
    static async Task Main()
    {
        string sourcePath = "1GB_Image.jpg";
        string outputDir = "CroppedImages";
        Directory.CreateDirectory(outputDir);

        // Use a dedicated configuration so memory behavior can be tuned
        // for very large images without affecting the global default.
        var configuration = Configuration.Default.Clone();

        using (var image = await Image.LoadAsync(configuration, sourcePath))
        {
            int totalWidth = image.Width;
            int totalHeight = image.Height;

            // Tile size for a 2-column x 4-row grid.
            int chunkWidth = totalWidth / 2;
            int chunkHeight = totalHeight / 4;

            for (int row = 0; row < 4; row++)
            {
                for (int col = 0; col < 2; col++)
                {
                    int x = col * chunkWidth;
                    int y = row * chunkHeight;

                    // The last column/row absorbs the division remainder.
                    int width = (col == 1) ? totalWidth - x : chunkWidth;
                    int height = (row == 3) ? totalHeight - y : chunkHeight;

                    // Clone before cropping so the source image stays
                    // intact for the next tile.
                    using (var cropped = image.Clone(ctx =>
                        ctx.Crop(new Rectangle(x, y, width, height))))
                    {
                        string outputPath = Path.Combine(outputDir, $"part_{row}_{col}.jpg");
                        await cropped.SaveAsync(outputPath, new JpegEncoder
                        {
                            Quality = 80 // moderate compression
                        });
                        Console.WriteLine($"Saved: {outputPath}");
                    }
                }
            }
        }
    }
}
Method 3: Use memory mapped files to process super large images
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.IO.MemoryMappedFiles;
using System.Runtime.InteropServices;

/// <summary>
/// Crops a 24bpp bottom-up BMP into a 2x4 grid (8 tiles) of JPEG files by
/// reading pixel rows through a memory-mapped file, so the 1GB source is
/// never fully loaded into managed memory.
/// </summary>
class Program
{
    static void Main()
    {
        string sourcePath = "1GB_Image.bmp";
        string outputDir = "CroppedImages";
        Directory.CreateDirectory(outputDir);

        // Read dimensions and the pixel-data offset from the BMP header
        // without decoding the image.
        var bmpInfo = GetBmpInfo(sourcePath);
        int width = bmpInfo.Width;
        int height = bmpInfo.Height;
        int bytesPerPixel = bmpInfo.BitsPerPixel / 8;

        // This sample only handles the common 24bpp bottom-up layout
        // (positive height). Fail loudly rather than write garbage.
        if (bmpInfo.BitsPerPixel != 24 || height <= 0)
            throw new NotSupportedException(
                "Only 24bpp bottom-up BMP files are supported by this sample.");

        // BMP rows are padded to a 4-byte boundary; width * bpp alone
        // would mis-address every row of a non-multiple-of-4 width.
        int stride = ((width * bytesPerPixel) + 3) / 4 * 4;

        // Tile size for a 2-column x 4-row grid.
        int chunkWidth = width / 2;
        int chunkHeight = height / 4;

        using (var mmf = MemoryMappedFile.CreateFromFile(sourcePath, FileMode.Open))
        {
            for (int row = 0; row < 4; row++)
            {
                for (int col = 0; col < 2; col++)
                {
                    int x = col * chunkWidth;
                    int y = row * chunkHeight;

                    // The last column/row absorbs the division remainder.
                    int cropWidth = (col == 1) ? width - x : chunkWidth;
                    int cropHeight = (row == 3) ? height - y : chunkHeight;

                    using (var dest = new Bitmap(cropWidth, cropHeight, PixelFormat.Format24bppRgb))
                    {
                        var destData = dest.LockBits(
                            new Rectangle(0, 0, cropWidth, cropHeight),
                            ImageLockMode.WriteOnly,
                            PixelFormat.Format24bppRgb);

                        try
                        {
                            // BMP stores rows bottom-up: image row y lives at
                            // file row (height - 1 - y). baseOffset is the
                            // top row of this tile, shifted right by x pixels.
                            long baseOffset = bmpInfo.DataOffset
                                + (long)(height - 1 - y) * stride
                                + (long)x * bytesPerPixel;

                            int lineBytes = cropWidth * bytesPerPixel;
                            byte[] lineData = new byte[lineBytes];

                            // Copy line by line; successive image rows sit at
                            // DECREASING file offsets (bottom-up storage).
                            for (int line = 0; line < cropHeight; line++)
                            {
                                using (var accessor = mmf.CreateViewAccessor(
                                    baseOffset - (long)line * stride,
                                    lineBytes,
                                    MemoryMappedFileAccess.Read))
                                {
                                    accessor.ReadArray(0, lineData, 0, lineBytes);
                                    // Use the destination's own stride, which
                                    // may also include padding.
                                    IntPtr destPtr = destData.Scan0 + (line * destData.Stride);
                                    Marshal.Copy(lineData, 0, destPtr, lineBytes);
                                }
                            }
                        }
                        finally
                        {
                            dest.UnlockBits(destData);
                        }

                        string outputPath = Path.Combine(outputDir, $"part_{row}_{col}.jpg");
                        dest.Save(outputPath, ImageFormat.Jpeg);
                        Console.WriteLine($"Saved: {outputPath}");
                    }
                }
            }
        }
    }

    /// <summary>
    /// Reads width, height, color depth and the pixel-data offset from a
    /// BMP file header without loading the image.
    /// </summary>
    /// <exception cref="InvalidDataException">The file lacks the "BM" magic bytes.</exception>
    static (int Width, int Height, int BitsPerPixel, int DataOffset) GetBmpInfo(string filePath)
    {
        using (var fs = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        using (var reader = new BinaryReader(fs))
        {
            // Validate the "BM" signature.
            if (reader.ReadChar() != 'B' || reader.ReadChar() != 'M')
                throw new InvalidDataException("Not a valid BMP file");

            // Byte 10: offset of the pixel array. Do NOT assume 54 — the
            // header may carry extra fields or a palette.
            fs.Seek(10, SeekOrigin.Begin);
            int dataOffset = reader.ReadInt32();

            fs.Seek(18, SeekOrigin.Begin); // width then height
            int width = reader.ReadInt32();
            int height = reader.ReadInt32();

            fs.Seek(28, SeekOrigin.Begin); // bits per pixel
            int bitsPerPixel = reader.ReadInt16();

            return (width, height, bitsPerPixel, dataOffset);
        }
    }
}
Method 4: Use Magick.NET (professional image processing)
First install the NuGet package:
Install-Package Magick.NET-Q16-x64
Implementation code:
using ImageMagick;
using System;
using System.IO;

/// <summary>
/// Crops a very large image into a 2x4 grid (8 tiles) of JPEG files using
/// Magick.NET, which pages pixel data to disk when its memory limit is hit.
/// </summary>
class Program
{
    static void Main()
    {
        string sourcePath = "1GB_Image.tif";
        string outputDir = "CroppedImages";
        Directory.CreateDirectory(outputDir);

        // Cap ImageMagick's heap at 1GB; anything above this limit is
        // transparently spilled to disk instead of exhausting RAM.
        ResourceLimits.Memory = 1024UL * 1024 * 1024;

        using (var image = new MagickImage(sourcePath))
        {
            int width = (int)image.Width;
            int height = (int)image.Height;

            // Tile size for a 2-column x 4-row grid.
            int chunkWidth = width / 2;
            int chunkHeight = height / 4;

            for (int row = 0; row < 4; row++)
            {
                for (int col = 0; col < 2; col++)
                {
                    int x = col * chunkWidth;
                    int y = row * chunkHeight;

                    // The last column/row absorbs the division remainder.
                    int cropWidth = (col == 1) ? width - x : chunkWidth;
                    int cropHeight = (row == 3) ? height - y : chunkHeight;

                    // Clone just the requested region so the source stays
                    // intact for the next tile.
                    using (var cropped = image.Clone(new MagickGeometry
                    {
                        X = x,
                        Y = y,
                        Width = cropWidth,
                        Height = cropHeight
                    }))
                    {
                        string outputPath = Path.Combine(outputDir, $"part_{row}_{col}.jpg");
                        cropped.Quality = 85; // reasonable JPEG compression
                        cropped.Write(outputPath);
                        Console.WriteLine($"Saved: {outputPath}");
                    }
                }
            }
        }
    }
}
Cropping solution selection suggestions
| Method | Advantages | Shortcomings | Use cases |
|---|---|---|---|
| System.Drawing | Built-in library, simple | High memory usage; Windows-only | Small images on Windows |
| ImageSharp | Cross-platform, modern API | Learning curve | Cross-platform support needed |
| Memory mapping | Very memory-efficient | Complex; BMP only | Extremely large images |
| Magick.NET | Powerful, mature | Requires NuGet install | Professional image processing |
Things to note
1. Memory management: At least 2-3GB of available memory is required to process 1GB of images
2. File format: BMP/TIFF are well suited to block processing; JPEG must be fully decoded and recompressed, which can introduce quality loss
3. Disk space: Ensure there is enough space to store output files
4. Exception handling: add try-catch to handle IO and memory shortage
5. Performance optimization:
- Using 64-bit applications
- Increase GC memory limit: <gcAllowVeryLargeObjects enabled="true"/>
- Batch processing reduces memory pressure
This concludes the article on cropping very large (1GB) images into 8 smaller images in C#. For more related C# image-cropping content, please search my previous articles or continue browsing the related articles below. I hope you will continue to support me!