EntropyCodedSegment.java
/* Copyright (c) 2014-2015, David A. Clunie DBA Pixelmed Publishing. All rights reserved. */

package com.pixelmed.codec.jpeg;

import java.awt.Rectangle;
import java.awt.Shape;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import java.util.HashMap;
import java.util.Map;
import java.util.Vector;

public class EntropyCodedSegment {

    private static final String identString = "@(#) $Header: /userland/cvs/codec/com/pixelmed/codec/jpeg/EntropyCodedSegment.java,v 1.24 2016/01/16 13:30:09 dclunie Exp $";

    private boolean copying;
    private boolean decompressing;

    private OutputArrayOrStream[] decompressedOutputPerComponent;

    private boolean isHuffman;
    private boolean isDCT;
    private boolean isLossless;

    private ByteArrayOutputStream copiedBytes;

    private final MarkerSegmentSOS sos;
    private final MarkerSegmentSOF sof;
    private final Map<String,HuffmanTable> htByClassAndIdentifer;
    private final Map<String,QuantizationTable> qtByIdentifer;

    private final int nComponents;
    private final int[] DCEntropyCodingTableSelector;
    private final int[] ACEntropyCodingTableSelector;
    private final int[] HorizontalSamplingFactor;
    private final int[] VerticalSamplingFactor;

    private final int maxHorizontalSamplingFactor;
    private final int maxVerticalSamplingFactor;

    private final int nMCUHorizontally;

    private final Vector<Shape> redactionShapes;

    // stuff for lossless decompression ...
    private final int predictorForFirstSample;
    private final int[] predictorForComponent;
    private final int predictorSelectionValue;

    // these are class level, used by getOneLosslessValue() to maintain state (it updates them), and initialized by the constructor
    private int[] rowNumberAtBeginningOfRestartInterval; // indexed by component number; not final since reset at the beginning of each restart interval (in finish())
    private final int[] rowLength;                       // indexed by component number
    private final int[] currentRowNumber;                // indexed by component number
    private final int[] positionWithinRow;               // indexed by component number
    private final int[][] previousReconstructedRow;      // indexed by component number, positionWithinRow
    private final int[][] currentReconstructedRow;       // indexed by component number, positionWithinRow

    // stuff for bit extraction ...
    // copied from com.pixelmed.scpecg.HuffmanDecoder ...
    private byte[] bytesToDecompress;
    private int availableBytes;
    private int byteIndex;
    private int bitIndex;
    private int currentByte;
    private int currentBits;
    private int haveBits;

    private static final int[] extractBitFromByteMask = { 0x80,0x40,0x20,0x10,0x08,0x04,0x02,0x01 };

    private final void getEnoughBits(int wantBits) throws Exception {
        while (haveBits < wantBits) {
            if (bitIndex > 7) {
                if (byteIndex < availableBytes) {
                    currentByte=bytesToDecompress[byteIndex++];
//System.err.println("currentByte["+byteIndex+"] now = 0x"+Integer.toHexString(currentByte&0xff)+" "+Integer.toBinaryString(currentByte&0xff));
                    bitIndex=0;
                }
                else {
                    throw new Exception("No more bits (having decompressed "+byteIndex+" dec bytes)");
                }
            }
            int newBit = (currentByte & extractBitFromByteMask[bitIndex++]) == 0 ? 0 : 1;
            currentBits = (currentBits << 1) + newBit;
            ++haveBits;
        }
//System.err.println("getEnoughBits(): returning "+haveBits+" bits "+Integer.toBinaryString(currentBits)+" (ending at byte "+byteIndex+" bit "+(bitIndex-1)+")");
    }

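    // Illustrative note on getEnoughBits() above (hypothetical values): bits are accumulated
    // MSB first into currentBits, so if the next unread byte is 0xA5 (10100101b) then
    // getEnoughBits(3) leaves currentBits = 101b and haveBits = 3, and a following
    // getEnoughBits(5), made before the caller clears currentBits/haveBits, appends the next
    // two bits to give 10100b; callers such as decode() rely on this to grow a code one bit
    // at a time across byte boundaries.
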
    private int writeByte;      // only contains meaningful content when writeBitIndex > 0
    private int writeBitIndex;  // 0 means ready to write 1st (high) bit to writeByte, 7 means ready to write last (low) bit to writeByte, will transiently (inside writeBits only) be 8 to signal new byte needed

    private final void initializeWriteBits() {
        copiedBytes = new ByteArrayOutputStream();
        writeByte = 0;
        writeBitIndex = 0;  // start writing into 1st (high) bit of writeByte
    }

    private final void flushWriteBits() {
        if (writeBitIndex > 0) {
            // bits have been written to writeByte so need to pad it with 1s and write it
            while (writeBitIndex < 8) {
                writeByte = writeByte | extractBitFromByteMask[writeBitIndex];
                ++writeBitIndex;
            }
            copiedBytes.write(writeByte);
            if ((writeByte&0xff) == 0xff) {
                copiedBytes.write(0);  // stuffed zero byte after 0xff to prevent being considered marker
            }
            writeByte=0;
            writeBitIndex=0;
        }
        // else have not written any bits to writeByte, so do nothing
    }

    private final void writeBits(int bits,int nBits) {
//System.err.println("writeBits(): writing "+nBits+" bits "+Integer.toBinaryString(bits));
        if (nBits > 0) {
            for (int i=nBits-1; i>=0; --i) {
                final int whichBitMask = 1 << i;           // bits are "big endian"
                final int bitIsSet = bits & whichBitMask;  // zero or not zero
                // do not need to check writeBitIndex before "writing" ... will always be "ready"
                if (bitIsSet != 0) {
                    writeByte = writeByte | extractBitFromByteMask[writeBitIndex];
                }
                ++writeBitIndex;
                if (writeBitIndex > 7) {
//System.err.println("writeBits(): wrote = 0x"+Integer.toHexString(writeByte&0xff)+" "+Integer.toBinaryString(writeByte&0xff));
                    copiedBytes.write(writeByte);
                    if ((writeByte&0xff) == 0xff) {
                        copiedBytes.write(0);  // stuffed zero byte after 0xff to prevent being considered marker
                    }
                    writeByte=0;
                    writeBitIndex=0;
                }
            }
        }
    }

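    // Illustrative note on writeBits() and flushWriteBits() above (hypothetical values): bits are
    // packed MSB first into writeByte, a completed 0xFF byte is followed by a stuffed 0x00 so the
    // copied stream cannot be mistaken for a marker (ISO 10918-1 byte stuffing), and any trailing
    // partial byte is padded with 1 bits on flush. For example, writing the eleven bits
    // 11111111 010 emits 0xFF 0x00 and then, on flush, 0x5F (010 followed by five 1-bit pads).
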

    private HuffmanTable usingTable = null;

//int counter = 0;

    // Use 10918-1 F.2 Figure F.16 decode procedure
    private final int decode() throws Exception {
        final int[] MINCODE = usingTable.getMINCODE();
        final int[] MAXCODE = usingTable.getMAXCODE();
        final int[] VALPTR  = usingTable.getVALPTR();
        final int[] HUFFVAL = usingTable.getHUFFVAL();

        int I=1;
        getEnoughBits(I);  // modifies currentBits
        int CODE = currentBits;
        while (I<MAXCODE.length && CODE > MAXCODE[I]) {
        //while (CODE > MAXCODE[I]) {
            ++I;
//System.err.println("I = "+I);
            getEnoughBits(I);  // modifies currentBits
            CODE = currentBits;
//System.err.println("CODE "+Integer.toBinaryString(CODE));
//System.err.println("compare to MAXCODE[I] "+(I<MAXCODE.length ? Integer.toBinaryString(MAXCODE[I]) : "out of MAXCODE entries"));
        }
//System.err.println("Found CODE "+Integer.toBinaryString(CODE));
        int VALUE = 0;
        if (I<MAXCODE.length) {
            int J = VALPTR[I];
//System.err.println("Found VALPTR base "+J);
            J = J + CODE - MINCODE[I];
//System.err.println("Found VALPTR offset by code "+J);
            VALUE = HUFFVAL[J];
//System.err.println("Found VALUE "+VALUE+" dec (0x"+Integer.toHexString(VALUE)+")");
//System.err.println("HUFF_DECODE: "+VALUE+" COUNTER "+counter);
//++counter;
        }
        else {
            // we have exceeded the maximum coded value specified :(
            // copy IJG behavior in this situation from jdhuff.c "With garbage input we may reach the sentinel value l = 17" ... "fake a zero as the safest result"
//System.err.println("Bad Huffman code "+Integer.toBinaryString(CODE)+" so use VALUE "+VALUE+" dec (0x"+Integer.toHexString(VALUE)+")");
        }
        if (copying) { writeBits(currentBits,haveBits); }
        currentBits=0;
        haveBits=0;
        return VALUE;
    }

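    // Illustrative note on decode() above (hypothetical table): for a canonical table with two
    // 2-bit codes and one 3-bit code, MAXCODE[2] = 01b, MINCODE[3] = MAXCODE[3] = 100b and
    // VALPTR[3] = 2 (and, per the F.16 table construction, MAXCODE[l] is -1 for lengths with no
    // codes). Decoding the bit sequence 1,0,0 keeps extending while CODE > MAXCODE[I], stops at
    // length 3, and returns HUFFVAL[VALPTR[3] + CODE - MINCODE[3]] = HUFFVAL[2].
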
    private final int getValueOfRequestedLength(int wantBits) throws Exception {
        getEnoughBits(wantBits);  // modifies currentBits
        final int value = currentBits;
//System.err.println("getValueOfRequestedLength(): wantBits="+wantBits+" : Got value "+value+" dec (0x"+Integer.toHexString(value)+")");
        if (copying) { writeBits(currentBits,haveBits); }
        currentBits=0;
        haveBits=0;
        return value;
    }

    // values above index 11 only occur for 12 bit process ...
    private int[] dcSignBitMask = { 0x00/*na*/,0x01,0x02,0x04,0x08,0x10,0x20,0x40,0x80,0x100,0x200,0x400,0x800,0x1000,0x2000,0x4000 /*no entry for 16*/};
    private int[] maxAmplitude  = { 0/*na*/,0x02-1,0x04-1,0x08-1,0x10-1,0x20-1,0x40-1,0x80-1,0x100-1,0x200-1,0x400-1,0x800-1,0x1000-1,0x2000-1,0x4000-1,0x8000-1 /*no entry for 16*/};

    private final int convertSignAndAmplitudeBitsToValue(int value,int length) throws Exception {
        // see P&M Table 11-1 page 190 and Table 11-4 page 193 (same for DC and AC)
        if (length > 0) {
//System.err.println("dcSignBitMask = "+Integer.toHexString(dcSignBitMask[length]));
            if ((value & dcSignBitMask[length]) == 0) {
//System.err.println("Have sign bit");
                value = value - maxAmplitude[length];
            }
        }
        return value;
    }

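    // Illustrative note on convertSignAndAmplitudeBitsToValue() above: with length (SSSS) = 3,
    // dcSignBitMask[3] = 0x04 and maxAmplitude[3] = 7, so the 3-bit fields 000b..011b (sign bit
    // clear) map to -7..-4 via value - 7, while 100b..111b are returned unchanged as 4..7,
    // which is the magnitude-category mapping of P&M Table 11-1.
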
    private final void writeEntropyCodedAllZeroACCoefficients() {
        // write a single EOB code, which is rrrrssss = 0x00
        writeBits(usingTable.getEOBCode(),usingTable.getEOBCodeLength());
    }

    public EntropyCodedSegment(MarkerSegmentSOS sos,MarkerSegmentSOF sof,Map<String,HuffmanTable> htByClassAndIdentifer,Map<String,QuantizationTable> qtByIdentifer,int nMCUHorizontally,Vector<Shape> redactionShapes,boolean copying,boolean dumping,boolean decompressing,Parse.DecompressedOutput decompressedOutput) throws Exception {
        this.sos = sos;
        this.sof = sof;
        this.htByClassAndIdentifer = htByClassAndIdentifer;
        this.qtByIdentifer = qtByIdentifer;
        this.nMCUHorizontally = nMCUHorizontally;
        this.redactionShapes = redactionShapes;
        this.copying = copying;
        // dumping is not used other than in this constructor
        this.decompressing = decompressing;
        this.decompressedOutputPerComponent = decompressedOutput == null ? null : decompressedOutput.getDecompressedOutputPerComponent();

        this.isHuffman = Markers.isHuffman(sof.getMarker());
        if (!isHuffman) {
            throw new Exception("Only Huffman processes supported (not "+Markers.getAbbreviation(sof.getMarker())+" "+Markers.getDescription(sof.getMarker())+")");
        }
        this.isDCT = Markers.isDCT(sof.getMarker());
        this.isLossless = Markers.isLossless(sof.getMarker());

        nComponents = sos.getNComponentsPerScan();
        DCEntropyCodingTableSelector = sos.getDCEntropyCodingTableSelector();
        ACEntropyCodingTableSelector = sos.getACEntropyCodingTableSelector();
        HorizontalSamplingFactor = sof.getHorizontalSamplingFactor();
        VerticalSamplingFactor = sof.getVerticalSamplingFactor();

        maxHorizontalSamplingFactor = max(HorizontalSamplingFactor);
//System.err.println("maxHorizontalSamplingFactor "+maxHorizontalSamplingFactor);
        maxVerticalSamplingFactor = max(VerticalSamplingFactor);
//System.err.println("maxVerticalSamplingFactor "+maxVerticalSamplingFactor);

        if (isLossless && decompressing) {
//System.err.println("SamplePrecision "+sof.getSamplePrecision());
//System.err.println("SuccessiveApproximationBitPositionLowOrPointTransform "+sos.getSuccessiveApproximationBitPositionLowOrPointTransform());
            predictorForFirstSample = 1 << (sof.getSamplePrecision() - sos.getSuccessiveApproximationBitPositionLowOrPointTransform() - 1);
//System.err.println("predictorForFirstSample "+predictorForFirstSample+" dec");
            predictorForComponent = new int[nComponents];
            predictorSelectionValue = sos.getStartOfSpectralOrPredictorSelection();
//System.err.println("predictorSelectionValue "+predictorSelectionValue);

            rowLength = new int[nComponents];
            currentRowNumber = new int[nComponents];
            positionWithinRow = new int[nComponents];
            rowNumberAtBeginningOfRestartInterval = new int[nComponents];
            previousReconstructedRow = new int[nComponents][];
            currentReconstructedRow = new int[nComponents][];
            for (int c=0; c<nComponents; ++c) {
                //rowLength[c] = sof.getNSamplesPerLine()/sof.getHorizontalSamplingFactor()[c];
                rowLength[c] = (sof.getNSamplesPerLine()-1)/sof.getHorizontalSamplingFactor()[c]+1;  // account for row lengths that are not an exact multiple of the sampling factor ... hmmm :(
//System.err.println("rowLength["+c+"] "+rowLength[c]);
                currentRowNumber[c] = 0;
                positionWithinRow[c] = 0;
                rowNumberAtBeginningOfRestartInterval[c] = 0;
                previousReconstructedRow[c] = new int[rowLength[c]];
                currentReconstructedRow[c] = new int[rowLength[c]];
            }
        }
        else {
            predictorForFirstSample = 0;  // silence uninitialized warnings
            predictorForComponent = null;
            predictorSelectionValue = 0;
            rowLength = null;
            currentRowNumber = null;
            positionWithinRow = null;
            rowNumberAtBeginningOfRestartInterval = null;
            previousReconstructedRow = null;
            currentReconstructedRow = null;
        }

        if (dumping) dumpHuffmanTables();
        //dumpQuantizationTables();
    }

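    // Illustrative note on the rowLength computation in the constructor above: for positive
    // integers, (N-1)/H + 1 is the integer ceiling of N/H, e.g. 13 samples per line with a
    // sampling factor of 2 gives (13-1)/2 + 1 = 7 rather than 13/2 = 6, so a trailing partial
    // group of samples still gets a slot in the row buffers.
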
    private final int getOneLosslessValue(int c,int dcEntropyCodingTableSelector,int colMCU,int rowMCU) throws Exception {
        // per P&M page 492 (DIS H-2)
        int prediction = 0;
        if (decompressing) {
            if (currentRowNumber[c] == rowNumberAtBeginningOfRestartInterval[c]) {  // will be true for first row since all rowNumberAtBeginningOfRestartInterval entries are initialized to zero
                if (positionWithinRow[c] == 0) {  // first sample of first row
//System.err.println("Component "+c+" first sample of first row or first row after beginning of restart interval ... use predictorForFirstSample");
                    prediction = predictorForFirstSample;
                }
                else {
//System.err.println("Component "+c+" other than first sample of first row or first row after beginning of restart interval ... use Ra (previous sample in row)");
                    prediction = currentReconstructedRow[c][positionWithinRow[c]-1];  // Ra
                }
            }
            else if (positionWithinRow[c] == 0) {  // first sample of subsequent rows
//System.err.println("Component "+c+" first sample of subsequent rows");
                prediction = previousReconstructedRow[c][0];  // Rb for position 0
            }
            else {
                switch(predictorSelectionValue) {
                    case 1: prediction = currentReconstructedRow[c][positionWithinRow[c]-1];  // Ra
                        break;
                    case 2: prediction = previousReconstructedRow[c][positionWithinRow[c]];  // Rb
                        break;
                    case 3: prediction = previousReconstructedRow[c][positionWithinRow[c]-1];  // Rc
                        break;
                    case 4: prediction = currentReconstructedRow[c][positionWithinRow[c]-1] + previousReconstructedRow[c][positionWithinRow[c]] - previousReconstructedRow[c][positionWithinRow[c]-1];  // Ra + Rb - Rc
                        break;
                    case 5: prediction = currentReconstructedRow[c][positionWithinRow[c]-1] + ((previousReconstructedRow[c][positionWithinRow[c]] - previousReconstructedRow[c][positionWithinRow[c]-1])>>1);  // Ra + (Rb - Rc)/2
                        break;
                    case 6: prediction = previousReconstructedRow[c][positionWithinRow[c]] + ((currentReconstructedRow[c][positionWithinRow[c]-1] - previousReconstructedRow[c][positionWithinRow[c]-1])>>1);  // Rb + (Ra - Rc)/2
                        break;
                    case 7: prediction = (currentReconstructedRow[c][positionWithinRow[c]-1] + previousReconstructedRow[c][positionWithinRow[c]])>>1;  // (Ra+Rb)/2
                        break;
                    default:
                        throw new Exception("Unrecognized predictor selection value "+predictorSelectionValue);
                }
            }
//System.err.println("prediction ["+currentRowNumber[c]+","+positionWithinRow[c]+"] = "+prediction+" dec (0x"+Integer.toHexString(prediction)+")");
        }

        usingTable = htByClassAndIdentifer.get("0+"+Integer.toString(dcEntropyCodingTableSelector));

        final int ssss = decode();  // number of DC bits encoded next
        // see P&M Table 11-1 page 190
        int dcValue = 0;
        if (ssss == 0) {
            dcValue = 0;
        }
        else if (ssss == 16) {  // only occurs for lossless
            dcValue = 32768;
        }
        else {
            final int dcBits = getValueOfRequestedLength(ssss);
            dcValue = convertSignAndAmplitudeBitsToValue(dcBits,ssss);
        }
//System.err.println("encoded difference value ["+currentRowNumber[c]+","+positionWithinRow[c]+"] = "+dcValue+" dec (0x"+Integer.toHexString(dcValue)+")");

        int reconstructedValue = 0;

        if (decompressing) {
            reconstructedValue = (dcValue + prediction) & 0x0000ffff;

//System.err.println("reconstructedValue value ["+currentRowNumber[c]+","+positionWithinRow[c]+"] = "+reconstructedValue+" dec (0x"+Integer.toHexString(reconstructedValue)+")");

            currentReconstructedRow[c][positionWithinRow[c]] = reconstructedValue;

            ++positionWithinRow[c];
            if (positionWithinRow[c] >= rowLength[c]) {
//System.err.println("Component "+c+" starting next row");
                positionWithinRow[c] = 0;
                ++currentRowNumber[c];
                int[] holdRow = previousReconstructedRow[c];
                previousReconstructedRow[c] = currentReconstructedRow[c];
                currentReconstructedRow[c] = holdRow;  // values do not matter, will be overwritten, saves deallocating and reallocating
            }
        }

        return reconstructedValue;  // meaningless unless decompressing, but still need to have absorbed bits from input to stay in sync
    }

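    // Illustrative note on getOneLosslessValue() above (hypothetical parameters): for an 8-bit
    // lossless scan with point transform 0, predictorForFirstSample is 1 << (8 - 0 - 1) = 128,
    // and each sample is reconstructed as (decoded difference + prediction) & 0xffff, i.e.
    // modulo 65536, matching the modulo-2^16 difference arithmetic of the lossless process.
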
    // A "data unit" is the "smallest logical unit that can be processed", which in the case of DCT-based processes is one 8x8 block of coefficients (P&M page 101)
    private final void getOneDCTDataUnit(int dcEntropyCodingTableSelector,int acEntropyCodingTableSelector,boolean redact) throws Exception {
        usingTable = htByClassAndIdentifer.get("0+"+Integer.toString(dcEntropyCodingTableSelector));
        {
            final int ssss = decode();  // number of DC bits encoded next
            // see P&M Table 11-1 page 190
            int dcValue = 0;
            if (ssss == 0) {
                dcValue = 0;
            }
            else if (ssss == 16) {  // only occurs for lossless
                dcValue = 32768;
            }
            else {
                final int dcBits = getValueOfRequestedLength(ssss);
                dcValue = convertSignAndAmplitudeBitsToValue(dcBits,ssss);
            }
//System.err.println("Got DC value "+dcValue+" dec (0x"+Integer.toHexString(dcValue)+")");
        }

        usingTable = htByClassAndIdentifer.get("1+"+Integer.toString(acEntropyCodingTableSelector));

        final boolean restoreCopying = copying;
        if (redact && copying) {
            copying = false;
            writeEntropyCodedAllZeroACCoefficients();
        }

        int i=1;
        while (i<64) {
//System.err.println("AC ["+i+"]:");
            final int rrrrssss = decode();
            if (rrrrssss == 0) {
//System.err.println("AC ["+i+"]: "+"EOB");
                break;  // EOB
            }
            else if (rrrrssss == 0xF0) {
//System.err.println("AC ["+i+"]: "+"ZRL: 16 zeroes");
                i+=16;
            }
            else {
                // note that ssss of zero is not used for AC (unlike DC) in sequential mode
                final int rrrr = rrrrssss >>> 4;
                final int ssss = rrrrssss & 0x0f;
//System.err.println("AC ["+i+"]: rrrr="+rrrr+" ssss="+ssss);
                final int acBits = getValueOfRequestedLength(ssss);
                final int acValue = convertSignAndAmplitudeBitsToValue(acBits,ssss);
//System.err.println("AC ["+i+"]: "+rrrr+" zeroes then value "+acValue);
                i+=rrrr;  // the number of zeroes
                ++i;      // the value we read (ssss is always non-zero, so we always read something)
            }
        }

        copying = restoreCopying;
    }

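    // Illustrative note on the AC loop in getOneDCTDataUnit() above: each decoded symbol is a
    // composite RRRRSSSS byte; 0x23, say, means skip RRRR = 2 zero coefficients and then read
    // SSSS = 3 amplitude bits for the next non-zero coefficient, while 0x00 is EOB (the rest of
    // the 8x8 block is zero) and 0xF0 is ZRL (a run of 16 zeros with no coefficient value).
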
    private final boolean redactionDecision(int colMCU,int rowMCU,int thisHorizontalSamplingFactor,int thisVerticalSamplingFactor,int maxHorizontalSamplingFactor,int maxVerticalSamplingFactor,int h,int v,Vector<Shape> redactionShapes) {
        // only invoked for DCT so block size is always 8
        final int vMCUSize = 8 * maxVerticalSamplingFactor;
        final int hMCUSize = 8 * maxHorizontalSamplingFactor;
//System.err.println("MCUSize in pixels = "+hMCUSize+" * "+vMCUSize);

        final int hMCUOffset = colMCU * hMCUSize;
        final int vMCUOffset = rowMCU * vMCUSize;
//System.err.println("MCUOffset in pixels = "+hMCUOffset+" * "+vMCUOffset);

        final int hBlockSize = 8 * maxHorizontalSamplingFactor/thisHorizontalSamplingFactor;
        final int vBlockSize = 8 * maxVerticalSamplingFactor/thisVerticalSamplingFactor;
//System.err.println("BlockSize in pixels = "+hBlockSize+" * "+vBlockSize);

        final int xBlock = hMCUOffset + h * hBlockSize;
        final int yBlock = vMCUOffset + v * vBlockSize;

        Rectangle blockShape = new Rectangle(xBlock,yBlock,hBlockSize,vBlockSize);
//System.err.println("blockShape "+blockShape);

        boolean redact = false;
        if (redactionShapes != null) {
            for (Shape redactionShape : redactionShapes) {
                if (redactionShape.intersects(blockShape)) {
                    redact = true;
                    break;
                }
            }
        }
        return redact;
    }

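    // Illustrative note on redactionDecision() above (hypothetical sampling): with 4:2:0
    // subsampling (maxH = maxV = 2, chroma H = V = 1) an MCU covers 16x16 image pixels, each
    // luma block covers 8x8 pixels and each chroma block 16x16, so any redaction shape that
    // intersects one of those pixel rectangles causes the corresponding block to be redacted.
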
    private final void writeDecompressedPixel(int c,int decompressedPixel) throws IOException {
        if (sof.getSamplePrecision() <= 8) {
            decompressedOutputPerComponent[c].writeByte(decompressedPixel);
        }
        else {
            // endianness handled by OutputArrayOrStream
            decompressedOutputPerComponent[c].writeShort(decompressedPixel);
        }
    }

    private final void getOneMinimumCodedUnit(int nComponents,int[] DCEntropyCodingTableSelector,int[] ACEntropyCodingTableSelector,int[] HorizontalSamplingFactor,int[] VerticalSamplingFactor,int maxHorizontalSamplingFactor,int maxVerticalSamplingFactor,int colMCU,int rowMCU,Vector<Shape> redactionShapes) throws Exception, IOException {
        for (int c=0; c<nComponents; ++c) {
            // See discussion of interleaving of data units within MCUs in P&M section 7.3.5 pages 101-105; always interleaved in sequential mode
            for (int v=0; v<VerticalSamplingFactor[c]; ++v) {
                for (int h=0; h<HorizontalSamplingFactor[c]; ++h) {
//System.err.println("Component "+c+" v "+v+" h "+h);
                    boolean redact = redactionDecision(colMCU,rowMCU,HorizontalSamplingFactor[c],VerticalSamplingFactor[c],maxHorizontalSamplingFactor,maxVerticalSamplingFactor,h,v,redactionShapes);
                    if (isDCT) {
                        getOneDCTDataUnit(DCEntropyCodingTableSelector[c],ACEntropyCodingTableSelector[c],redact);
                    }
                    else if (isLossless) {
                        int decompressedPixel = getOneLosslessValue(c,DCEntropyCodingTableSelector[c],colMCU,rowMCU);
                        if (decompressing) {
                            writeDecompressedPixel(c,decompressedPixel);
                        }
                    }
                    else {
                        throw new Exception("Only DCT or Lossless processes supported (not "+Markers.getAbbreviation(sof.getMarker())+" "+Markers.getDescription(sof.getMarker())+")");
                    }
                }
            }
        }
    }

    private static final int max(int[] a) {
        int m = Integer.MIN_VALUE;
        for (int i : a) {
            if (i > m) m = i;
        }
        return m;
    }

    public final byte[] finish(byte[] bytesToDecompress,int mcuCount,int mcuOffset) throws Exception, IOException {
        this.bytesToDecompress = bytesToDecompress;
        availableBytes = this.bytesToDecompress.length;
        byteIndex = 0;
        bitIndex = 8;  // force fetching byte the first time
        haveBits = 0;  // don't have any bits to start with

        if (copying) {
            initializeWriteBits();  // will create a new ByteArrayOutputStream
        }

        if (rowNumberAtBeginningOfRestartInterval != null) {  // do not need to do this unless decompressing lossless
            for (int c=0; c<nComponents; ++c) {
//System.err.println("Setting rowNumberAtBeginningOfRestartInterval["+c+"] to "+currentRowNumber[c]);
                rowNumberAtBeginningOfRestartInterval[c] = currentRowNumber[c];  // for lossless decompression predictor selection
            }
        }
        //try {

        for (int mcu=0; mcu<mcuCount; ++mcu) {
            int rowMCU = mcuOffset / nMCUHorizontally;
            int colMCU = mcuOffset % nMCUHorizontally;
//System.err.println("MCU ("+rowMCU+","+colMCU+")");
            getOneMinimumCodedUnit(nComponents,DCEntropyCodingTableSelector,ACEntropyCodingTableSelector,HorizontalSamplingFactor,VerticalSamplingFactor,maxHorizontalSamplingFactor,maxVerticalSamplingFactor,colMCU,rowMCU,redactionShapes);
            ++mcuOffset;
        }

//System.err.println("Finished ...");
//System.err.println("availableBytes = "+availableBytes);
//System.err.println("byteIndex = "+byteIndex);
//System.err.println("bitIndex = "+bitIndex);
//System.err.println("currentByte = "+currentByte);
//System.err.println("currentBits = "+currentBits);
//System.err.println("haveBits = "+haveBits);

        //}
        //catch (Exception e) {
        //    e.printStackTrace(System.err);
        //}

        if (copying) {
            flushWriteBits();  // will pad appropriately to byte boundary
        }

        return copying ? copiedBytes.toByteArray() : null;
    }

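    // Illustrative note on finish() above (assumed calling pattern): the parser presumably calls
    // finish() once per entropy-coded segment, i.e. for the bytes between SOS (or an RSTn marker)
    // and the next marker, passing that restart interval's MCU count and the running MCU offset;
    // rowNumberAtBeginningOfRestartInterval is re-captured on each call so that lossless
    // prediction restarts correctly after every restart marker.
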
    private final void dumpHuffmanTables() {
        System.err.print("\n");
        for (HuffmanTable ht : htByClassAndIdentifer.values()) {
            System.err.print(ht.toString());
        }
    }

    private final void dumpQuantizationTables() {
        System.err.print("\n");
        for (QuantizationTable qt : qtByIdentifer.values()) {
            System.err.print(qt.toString());
        }
    }

}