Sophie

Sophie

distrib > Mandriva > 2007.1 > x86_64 > by-pkgid > fdddaca718fdaac56c6bff726f3554dd > files > 21

vdr-1.4.7-9mdv2007.1.src.rpm

diff -Xorig.lst -Nu vdr-1.4.7/channels.c vdr-1.4.7.h264/channels.c
--- vdr-1.4.7/channels.c	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/channels.c	2007-11-16 21:08:58.000000000 +0200
@@ -181,6 +181,7 @@
   schedule     = NULL;
   linkChannels = NULL;
   refChannel   = NULL;
+  vcodec       = VCODEC_UNKNOWN;
 }
 
 cChannel::cChannel(const cChannel &Channel)
@@ -521,6 +522,15 @@
      }
 }
 
+void cChannel::SetVCodec(int VCodec)
+{
+  if (vcodec != VCodec) {
+     modification |= CHANNELMOD_VCODEC;
+     vcodec = VCodec;
+     Channels.SetModified();
+  }
+}
+
 void cChannel::SetCaIds(const int *CaIds)
 {
   if (caids[0] && caids[0] <= 0x00FF)
diff -Xorig.lst -Nu vdr-1.4.7/channels.h vdr-1.4.7.h264/channels.h
--- vdr-1.4.7/channels.h	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/channels.h	2007-11-16 21:08:58.000000000 +0200
@@ -25,6 +25,7 @@
 #define CHANNELMOD_CA       0x10
 #define CHANNELMOD_TRANSP   0x20
 #define CHANNELMOD_LANGS    0x40
+#define CHANNELMOD_VCODEC   0x80
 #define CHANNELMOD_RETUNE   (CHANNELMOD_PIDS | CHANNELMOD_CA | CHANNELMOD_TRANSP)
 
 #define CHANNELSMOD_NONE    0
@@ -47,6 +48,10 @@
 #define CA_ENCRYPTED_MIN 0x0100
 #define CA_ENCRYPTED_MAX 0xFFFF
 
+#define VCODEC_UNKNOWN  0
+#define VCODEC_MPEG2    1
+#define VCODEC_H264     2
+
 struct tChannelParameterMap {
   int userValue;
   int driverValue;
@@ -144,6 +149,7 @@
   int transmission;
   int guard;
   int hierarchy;
+  int vcodec;
   int __EndData__;
   int modification;
   mutable const cSchedule *schedule;
@@ -186,6 +192,7 @@
   int Tid(void) const { return tid; }
   int Sid(void) const { return sid; }
   int Rid(void) const { return rid; }
+  int VCodec(void) const { return vcodec; }
   int Number(void) const { return number; }
   void SetNumber(int Number) { number = Number; }
   bool GroupSep(void) const { return groupSep; }
@@ -217,6 +224,7 @@
   void SetPortalName(const char *PortalName);
   void SetPluginParam(const char *PluginParam);
   void SetPids(int Vpid, int Ppid, int *Apids, char ALangs[][MAXLANGCODE2], int *Dpids, char DLangs[][MAXLANGCODE2], int Tpid);
+  void SetVCodec(int VCodec);
   void SetCaIds(const int *CaIds); // list must be zero-terminated
   void SetCaDescriptors(int Level);
   void SetLinkChannels(cLinkChannels *LinkChannels);
diff -Xorig.lst -Nu vdr-1.4.7/device.c vdr-1.4.7.h264/device.c
--- vdr-1.4.7/device.c	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/device.c	2007-11-16 21:08:58.000000000 +0200
@@ -905,7 +905,7 @@
         if (CaDevice->SetChannel(Channel, false) == scrOk) { // calling SetChannel() directly, not SwitchChannel()!
            if (NeedsDetachReceivers)
               CaDevice->DetachAllReceivers();
-           cControl::Launch(new cTransferControl(CaDevice, Channel->Vpid(), Channel->Apids(), Channel->Dpids(), Channel->Spids()));
+           cControl::Launch(new cTransferControl(CaDevice, Channel->Vpid(), Channel->Apids(), Channel->Dpids(), Channel->Spids(), Channel->VCodec()));
            }
         else
            Result = scrNoTransfer;
diff -Xorig.lst -Nu vdr-1.4.7/dvbdevice.c vdr-1.4.7.h264/dvbdevice.c
--- vdr-1.4.7/dvbdevice.c	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/dvbdevice.c	2007-11-16 21:08:58.000000000 +0200
@@ -897,7 +897,7 @@
      CHECK(ioctl(fd_audio, AUDIO_SET_AV_SYNC, true));
      }
   else if (StartTransferMode)
-     cControl::Launch(new cTransferControl(this, Channel->Vpid(), Channel->Apids(), Channel->Dpids(), Channel->Spids()));
+     cControl::Launch(new cTransferControl(this, Channel->Vpid(), Channel->Apids(), Channel->Dpids(), Channel->Spids(), Channel->VCodec()));
 
   return true;
 }
diff -Xorig.lst -Nu vdr-1.4.7/h264parser.c vdr-1.4.7.h264/h264parser.c
--- vdr-1.4.7/h264parser.c	1970-01-01 02:00:00.000000000 +0200
+++ vdr-1.4.7.h264/h264parser.c	2007-11-16 21:19:07.000000000 +0200
@@ -0,0 +1,461 @@
+/*
+ * h264parser.c: a minimalistic H.264 video stream parser
+ *
+ * See the main source file 'vdr.c' for copyright information and
+ * how to reach the author.
+ *
+ * The code was originally written by Reinhard Nissl <rnissl@gmx.de>,
+ * and adapted to the VDR coding style by Klaus.Schmidinger@cadsoft.de.
+ */
+
+#include "tools.h"
+#include "h264parser.h"
+
+namespace H264
+{
+  // --- cContext ------------------------------------------------------------
+
+  int cContext::GetFramesPerSec(void) const
+  {
+    const cSequenceParameterSet *SPS = ActiveSPS();
+    const cSliceHeader *SH = CurrentSlice();
+    if (!SH || !SPS->timing_info_present_flag || !SPS->time_scale || !SPS->num_units_in_tick)
+       return -1;
+    uint32_t DeltaTfiDivisor;
+    if (SPS->pic_struct_present_flag) {
+       if (!SPS->pic_timing_sei.Defined())
+          return -1;
+       switch (SPS->pic_timing_sei.pic_struct) {
+         case 1:
+         case 2:
+              DeltaTfiDivisor = 1;
+              break;
+         case 0:
+         case 3:
+         case 4:
+              DeltaTfiDivisor = 2;
+              break;
+         case 5:
+         case 6:
+              DeltaTfiDivisor = 3;
+              break;
+         case 7:
+              DeltaTfiDivisor = 4;
+              break;
+         case 8:
+              DeltaTfiDivisor = 6;
+              break;
+         default:
+              return -1;
+         }
+       }
+    else if (!SH->field_pic_flag)
+       DeltaTfiDivisor = 2;
+    else
+       DeltaTfiDivisor = 1;
+
+    double FPS = (double)SPS->time_scale / SPS->num_units_in_tick / DeltaTfiDivisor / (SH->field_pic_flag ? 2 : 1);
+    int FramesPerSec = (int)FPS;
+    if ((FPS - FramesPerSec) >= 0.5)
+       FramesPerSec++;
+    return FramesPerSec;
+  }
+
+  // --- cSimpleBuffer -------------------------------------------------------
+
+  cSimpleBuffer::cSimpleBuffer(int Size)
+  {
+    size = Size;
+    data = new uchar[size];
+    avail = 0;
+    gotten = 0;
+  }
+
+  cSimpleBuffer::~cSimpleBuffer()
+  {
+    delete data;
+  }
+
+  int cSimpleBuffer::Put(const uchar *Data, int Count)
+  {
+    if (Count < 0) {
+       if (avail + Count < 0)
+          Count = 0 - avail;
+       if (avail + Count < gotten)
+          Count = gotten - avail;
+       avail += Count;
+       return Count;
+       }
+    if (avail + Count > size)
+       Count = size - avail;
+    memcpy(data + avail, Data, Count);
+    avail += Count;
+    return Count;
+  }
+
+  uchar *cSimpleBuffer::Get(int &Count)
+  {
+    Count = gotten = avail;
+    return data;
+  }
+
+  void cSimpleBuffer::Del(int Count)
+  {
+    if (Count < 0)
+       return;
+    if (Count > gotten) {
+       esyslog("ERROR: invalid Count in H264::cSimpleBuffer::Del: %d (limited to %d)", Count, gotten);
+       Count = gotten;
+       }
+    if (Count < avail)
+       memmove(data, data + Count, avail - Count);
+    avail -= Count;
+    gotten = 0;
+  }
+
+  void cSimpleBuffer::Clear(void)
+  {
+    avail = gotten = 0;
+  }
+
+  // --- cParser -------------------------------------------------------------
+
+  cParser::cParser(bool OmitPicTiming)
+    : nalUnitDataBuffer(1000)
+  {
+    // the above buffer size of 1000 bytes won't hold a complete NAL unit but
+    // should be sufficient for the relevant part used for parsing.
+    omitPicTiming = OmitPicTiming; // only necessary to determine frames per second
+    Reset();
+  }
+
+  void cParser::Reset(void)
+  {
+    context = cContext();
+    nalUnitDataBuffer.Clear();
+    syncing = true;
+  }
+
+  void cParser::ParseSequenceParameterSet(uint8_t *Data, int Count)
+  {
+    cSequenceParameterSet SPS;
+
+    cBitReader br(Data + 1, Count - 1);
+    uint32_t profile_idc = br.u(8);
+    /* uint32_t constraint_set0_flag = */ br.u(1);
+    /* uint32_t constraint_set1_flag = */ br.u(1);
+    /* uint32_t constraint_set2_flag = */ br.u(1);
+    /* uint32_t constraint_set3_flag = */ br.u(1);
+    /* uint32_t reserved_zero_4bits = */ br.u(4);
+    /* uint32_t level_idc = */ br.u(8);
+    SPS.seq_parameter_set_id = br.ue();
+    if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || profile_idc == 144) {
+       uint32_t chroma_format_idc = br.ue();
+       if (chroma_format_idc == 3) {
+          /* uint32_t residual_colour_transform_flag = */ br.u(1);
+          }
+       /* uint32_t bit_depth_luma_minus8 = */ br.ue();
+       /* uint32_t bit_depth_chroma_minus8 = */ br.ue();
+       /* uint32_t qpprime_y_zero_transform_bypass_flag = */ br.u(1);
+       uint32_t seq_scaling_matrix_present_flag = br.u(1);
+       if (seq_scaling_matrix_present_flag) {
+          for (int i = 0; i < 8; i++) {
+              uint32_t seq_scaling_list_present_flag = br.u(1);
+              if (seq_scaling_list_present_flag) {
+                 int sizeOfScalingList = (i < 6) ? 16 : 64;
+                 int lastScale = 8;
+                 int nextScale = 8;
+                 for (int j = 0; j < sizeOfScalingList; j++) {
+                     if (nextScale != 0) {
+                        int32_t delta_scale = br.se();
+                        nextScale = (lastScale + delta_scale + 256) % 256;
+                        }
+                     lastScale = (nextScale == 0) ? lastScale : nextScale;
+                     }
+                 }
+              }
+          }
+       }
+    SPS.log2_max_frame_num_minus4(br.ue());
+    SPS.pic_order_cnt_type = br.ue();
+    if (SPS.pic_order_cnt_type == 0)
+       SPS.log2_max_pic_order_cnt_lsb_minus4(br.ue());
+    else if (SPS.pic_order_cnt_type == 1) {
+       SPS.delta_pic_order_always_zero_flag = br.u(1);
+       /* int32_t offset_for_non_ref_pic = */ br.se();
+       /* int32_t offset_for_top_to_bottom_field = */ br.se();
+       uint32_t num_ref_frames_in_pic_order_cnt_cycle = br.ue();
+       for (uint32_t i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++) {
+           /* int32_t offset_for_ref_frame = */ br.se();
+           }
+       }
+    /* uint32_t num_ref_frames = */ br.ue();
+    /* uint32_t gaps_in_frame_num_value_allowed_flag = */ br.u(1);
+    /* uint32_t pic_width_in_mbs_minus1 = */ br.ue();
+    /* uint32_t pic_height_in_map_units_minus1 = */ br.ue();
+    SPS.frame_mbs_only_flag = br.u(1);
+
+    if (!omitPicTiming) {
+       if (!SPS.frame_mbs_only_flag) {
+          /* uint32_t mb_adaptive_frame_field_flag = */ br.u(1);
+          }
+       /* uint32_t direct_8x8_inference_flag = */ br.u(1);
+       uint32_t frame_cropping_flag = br.u(1);
+       if (frame_cropping_flag) {
+          /* uint32_t frame_crop_left_offset = */ br.ue();
+          /* uint32_t frame_crop_right_offset = */ br.ue();
+          /* uint32_t frame_crop_top_offset = */ br.ue();
+          /* uint32_t frame_crop_bottom_offset = */ br.ue();
+          }
+       uint32_t vui_parameters_present_flag = br.u(1);
+       if (vui_parameters_present_flag) {
+          uint32_t aspect_ratio_info_present_flag = br.u(1);
+          if (aspect_ratio_info_present_flag) {
+             uint32_t aspect_ratio_idc = br.u(8);
+             const uint32_t Extended_SAR = 255;
+             if (aspect_ratio_idc == Extended_SAR) {
+                /* uint32_t sar_width = */ br.u(16);
+                /* uint32_t sar_height = */ br.u(16);
+                }
+             }
+          uint32_t overscan_info_present_flag = br.u(1);
+          if (overscan_info_present_flag) {
+             /* uint32_t overscan_appropriate_flag = */ br.u(1);
+             }
+          uint32_t video_signal_type_present_flag = br.u(1);
+          if (video_signal_type_present_flag) {
+             /* uint32_t video_format = */ br.u(3);
+             /* uint32_t video_full_range_flag = */ br.u(1);
+             uint32_t colour_description_present_flag = br.u(1);
+             if (colour_description_present_flag) {
+                /* uint32_t colour_primaries = */ br.u(8);
+                /* uint32_t transfer_characteristics = */ br.u(8);
+                /* uint32_t matrix_coefficients = */ br.u(8);
+                }
+             }
+          uint32_t chroma_loc_info_present_flag = br.u(1);
+          if (chroma_loc_info_present_flag) {
+             /* uint32_t chroma_sample_loc_type_top_field = */ br.ue();
+             /* uint32_t chroma_sample_loc_type_bottom_field = */ br.ue();
+             }
+          SPS.timing_info_present_flag = br.u(1);
+          if (SPS.timing_info_present_flag) {
+             SPS.num_units_in_tick = br.u(32);
+             SPS.time_scale = br.u(32);
+             SPS.fixed_frame_rate_flag = br.u(1);
+             }
+          SPS.nal_hrd_parameters_present_flag = br.u(1);
+          if (SPS.nal_hrd_parameters_present_flag)
+             hrd_parameters(SPS, br);
+          SPS.vcl_hrd_parameters_present_flag = br.u(1);
+          if (SPS.vcl_hrd_parameters_present_flag)
+             hrd_parameters(SPS, br);
+          if (SPS.nal_hrd_parameters_present_flag || SPS.vcl_hrd_parameters_present_flag) {
+             /* uint32_t low_delay_hrd_flag = */ br.u(1);
+             }
+          SPS.pic_struct_present_flag = br.u(1);
+          }
+       }
+
+    context.Define(SPS);
+  }
+
+  void cParser::hrd_parameters(cSequenceParameterSet &SPS, cBitReader &br)
+  {
+    uint32_t cpb_cnt_minus1 = br.ue();
+    /* uint32_t bit_rate_scale = */ br.u(4);
+    /* uint32_t cpb_size_scale = */ br.u(4);
+    for (uint32_t i = 0; i <= cpb_cnt_minus1; i++) {
+        /* uint32_t bit_rate_value_minus1 = */ br.ue();
+        /* uint32_t cpb_size_value_minus1 = */ br.ue();
+        /* uint32_t cbr_flag = */ br.u(1);
+        }
+    /* uint32_t initial_cpb_removal_delay_length_minus1 = */ br.u(5);
+    SPS.cpb_removal_delay_length_minus1(br.u(5));
+    SPS.dpb_output_delay_length_minus1(br.u(5));
+    /* uint32_t time_offset_length = */ br.u(5);
+  }
+
+  void cParser::ParsePictureParameterSet(uint8_t *Data, int Count)
+  {
+    cPictureParameterSet PPS;
+
+    cBitReader br(Data + 1, Count - 1);
+    PPS.pic_parameter_set_id = br.ue();
+    PPS.seq_parameter_set_id = br.ue();
+    /* uint32_t entropy_coding_mode_flag = */ br.u(1);
+    PPS.pic_order_present_flag = br.u(1);
+
+    context.Define(PPS);
+  }
+
+  void cParser::ParseSlice(uint8_t *Data, int Count)
+  {
+    cSliceHeader SH;
+
+    cBitReader br(Data + 1, Count - 1);
+    SH.nal_ref_idc(Data[0] >> 5);
+    SH.nal_unit_type(Data[0] & 0x1F);
+    /* uint32_t first_mb_in_slice = */ br.ue();
+    SH.slice_type = br.ue();
+    SH.pic_parameter_set_id = br.ue();
+
+    context.ActivatePPS(SH.pic_parameter_set_id);
+    const cSequenceParameterSet *SPS = context.ActiveSPS();
+
+    SH.frame_num = br.u(SPS->log2_max_frame_num());
+    if (!SPS->frame_mbs_only_flag) {
+       SH.field_pic_flag = br.u(1);
+       if (SH.field_pic_flag)
+          SH.bottom_field_flag = br.u(1);
+       }
+    if (SH.nal_unit_type() == 5)
+       SH.idr_pic_id = br.ue();
+    if (SPS->pic_order_cnt_type == 0) {
+       SH.pic_order_cnt_lsb = br.u(SPS->log2_max_pic_order_cnt_lsb());
+       const cPictureParameterSet *PPS = context.ActivePPS();
+       if (PPS->pic_order_present_flag && !SH.field_pic_flag)
+          SH.delta_pic_order_cnt_bottom = br.se();
+       }
+    if (SPS->pic_order_cnt_type == 1 && !SPS->delta_pic_order_always_zero_flag) {
+       SH.delta_pic_order_cnt[0] = br.se();
+       const cPictureParameterSet *PPS = context.ActivePPS();
+       if (PPS->pic_order_present_flag && !SH.field_pic_flag)
+          SH.delta_pic_order_cnt[1] = br.se();
+       }
+
+    context.Define(SH);
+  }
+
+  void cParser::ParseSEI(uint8_t *Data, int Count)
+  {
+    // currently only used to determine frames per second
+    if (omitPicTiming)
+       return;
+    cBitReader br(Data + 1, Count - 1);
+    do
+      sei_message(br);
+    while (br.GetBytesAvail());
+  }
+
+  void cParser::sei_message(cBitReader &br)
+  {
+    uint32_t payloadType = 0;
+    while (1) {
+          uint32_t last_payload_type_byte = br.u(8);
+          payloadType += last_payload_type_byte;
+          if (last_payload_type_byte != 0xFF)
+             break;
+          }
+    uint32_t payloadSize = 0;
+    while (1) {
+          uint32_t last_payload_size_byte = br.u(8);
+          payloadSize += last_payload_size_byte;
+          if (last_payload_size_byte != 0xFF)
+             break;
+          }
+    sei_payload(payloadType, payloadSize, br);
+  }
+
+  void cParser::sei_payload(uint32_t payloadType, uint32_t payloadSize, cBitReader &br)
+  {
+    const cBitReader::cBookMark BookMark = br.BookMark();
+    switch (payloadType) {
+      case 0:
+           buffering_period(payloadSize, br);
+           break;
+      case 1:
+           pic_timing(payloadSize, br);
+           break;
+      }
+    // instead of dealing with trailing bits in each message
+    // go back to start of message and skip it completely
+    br.BookMark(BookMark);
+    reserved_sei_message(payloadSize, br);
+  }
+
+  void cParser::buffering_period(uint32_t payloadSize, cBitReader &br)
+  {
+    uint32_t seq_parameter_set_id = br.ue();
+
+    context.ActivateSPS(seq_parameter_set_id);
+  }
+
+  void cParser::pic_timing(uint32_t payloadSize, cBitReader &br)
+  {
+    cPictureTiming PT;
+
+    const cSequenceParameterSet *SPS = context.ActiveSPS();
+    if (!SPS)
+       return;
+    uint32_t CpbDpbDelaysPresentFlag = SPS->nal_hrd_parameters_present_flag || SPS->vcl_hrd_parameters_present_flag;
+    if (CpbDpbDelaysPresentFlag) {
+       /* uint32_t cpb_removal_delay = */ br.u(SPS->cpb_removal_delay_length());
+       /* uint32_t dpb_output_delay = */ br.u(SPS->dpb_output_delay_length());
+       }
+    if (SPS->pic_struct_present_flag) {
+       PT.pic_struct = br.u(4);
+       }
+
+    context.Define(PT);
+  }
+
+  void cParser::reserved_sei_message(uint32_t payloadSize, cBitReader &br)
+  {
+    for (uint32_t i = 0; i < payloadSize; i++) {
+        /* uint32_t reserved_sei_message_payload_byte = */ br.u(8);
+        }
+  }
+
+  void cParser::PutNalUnitData(const uchar *Data, int Count)
+  {
+    int n = nalUnitDataBuffer.Put(Data, Count);
+    // typically, less than a complete NAL unit is needed for parsing the
+    // relevant data, so simply ignore the overflow condition.
+    if (false && n != Count)
+       esyslog("ERROR: H264::cParser::PutNalUnitData(): NAL unit data buffer overflow");
+  }
+
+  void cParser::Process()
+  {
+    // nalUnitDataBuffer contains the head of the current NAL unit -- parse it
+    int Count = 0;
+    uchar *Data = nalUnitDataBuffer.Get(Count);
+    if (Data && Count >= 4) {
+       if (Data[0] == 0x00 && Data[1] == 0x00 && Data[2] == 0x01) {
+          int nal_unit_type = Data[3] & 0x1F;
+          try {
+              switch (nal_unit_type) {
+                case 1: // coded slice of a non-IDR picture
+                case 2: // coded slice data partition A
+                case 5: // coded slice of an IDR picture
+                     ParseSlice(Data + 3, Count - 3);
+                     break;
+                case 6: // supplemental enhancement information (SEI)
+                     ParseSEI(Data + 3, Count - 3);
+                     break;
+                case 7: // sequence parameter set
+                     syncing = false; // from now on, we should get reliable results
+                     ParseSequenceParameterSet(Data + 3, Count - 3);
+                     break;
+                case 8: // picture parameter set
+                     ParsePictureParameterSet(Data + 3, Count - 3);
+                     break;
+                }
+              }
+          catch (cException *e) {
+              if (!syncing) // suppress typical error messages while syncing
+                 esyslog(e->Message());
+              delete e;
+              }
+          }
+       else if (!syncing)
+          esyslog("ERROR: H264::cParser::Process(): NAL unit data buffer content is invalid");
+       }
+    else if (!syncing)
+       esyslog("ERROR: H264::cParser::Process(): NAL unit data buffer content is too short");
+    // reset the buffer for the next NAL unit
+    nalUnitDataBuffer.Clear();
+  }
+}
+
diff -Xorig.lst -Nu vdr-1.4.7/h264parser.h vdr-1.4.7.h264/h264parser.h
--- vdr-1.4.7/h264parser.h	1970-01-01 02:00:00.000000000 +0200
+++ vdr-1.4.7.h264/h264parser.h	2007-11-16 21:19:07.000000000 +0200
@@ -0,0 +1,397 @@
+/*
+ * h264parser.h: a minimalistic H.264 video stream parser
+ *
+ * See the main source file 'vdr.c' for copyright information and
+ * how to reach the author.
+ */
+
+#ifndef __H264PARSER_H
+#define __H264PARSER_H
+
+namespace H264
+{
+  // --- cException ----------------------------------------------------------
+
+  class cException {
+  private:
+    cString message;
+  public:
+    cException(const cString &Message) { message = Message; }
+    const cString &Message(void) const { return message; }
+  };
+
+  // --- cBitReader ----------------------------------------------------------
+
+  class cBitReader {
+  public:
+    class cBookMark {
+    private:
+      uint8_t *data;
+      int count;
+      uint32_t bits;
+      uint32_t bitsAvail;
+      int countZeros;
+      cBookMark(void) {}
+      friend class cBitReader;
+    };
+  private:
+    cBookMark bm;
+    uint8_t NextByte(void);
+    uint32_t ReadBits(uint32_t n);
+  public:
+    cBitReader(uint8_t *Data, int Count);
+    uint32_t u(uint32_t n) { return ReadBits(n); } // read n bits as unsigned number
+    uint32_t ue(void); // read Exp-Golomb coded unsigned number
+    int32_t se(void); // read Exp-Golomb coded signed number
+    uint32_t GetBitsAvail(void) { return (bm.bitsAvail & 0x07); }
+    bool GetBytesAvail(void) { return (bm.count > 0); }
+    const cBookMark BookMark(void) const { return bm; }
+    void BookMark(const cBookMark &b) { bm = b; }
+  };
+
+  inline cBitReader::cBitReader(unsigned char *Data, int Count)
+  {
+    bm.data = Data;
+    bm.count = Count;
+    bm.bitsAvail = 0;
+    bm.countZeros = 0;
+  }
+
+  inline uint8_t cBitReader::NextByte(void)
+  {
+    if (bm.count < 1) // there is no more data left in this NAL unit
+       throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+    // detect 00 00 00, 00 00 01 and 00 00 03 and handle them
+    if (*bm.data == 0x00) {
+       if (bm.countZeros >= 3) // 00 00 00: the current NAL unit should have been terminated already before this sequence
+          throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+       // increase the zero counter as we have a zero byte
+       bm.countZeros++;
+       }
+    else {
+       if (bm.countZeros >= 2) {
+          if (*bm.data == 0x01) // 00 00 01: the current NAL unit should have been terminated already before this sequence
+             throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+          if (*bm.data == 0x03) {
+             // 00 00 03 xx: the emulation prevention byte 03 needs to be removed and xx must be returned
+             if (bm.count < 2)
+                throw new cException("ERROR: H264::cBitReader::NextByte(): premature end of data");
+             // drop 03 and xx will be returned below
+             bm.count--;
+             bm.data++;
+             }
+          }
+       // reset the zero counter as we had a non zero byte
+       bm.countZeros = 0;
+       }
+    bm.count--;
+    return *bm.data++;
+  }
+
+  inline uint32_t cBitReader::ReadBits(uint32_t n)
+  {
+    // fill the "shift register" bits with sufficient data
+    while (n > bm.bitsAvail) {
+          bm.bits <<= 8;
+          bm.bits |= NextByte();
+          bm.bitsAvail += 8;
+          if (bm.bitsAvail > 24) { // a further turn will overflow bitbuffer
+             if (n <= bm.bitsAvail)
+                break; // service non overflowing request
+             if (n <= 32) // split overflowing reads into concatenated reads 
+                return (ReadBits(16) << 16) | ReadBits(n - 16);
+             // cannot read more than 32 bits at once
+             throw new cException("ERROR: H264::cBitReader::ReadBits(): bitbuffer overflow");
+             }
+          }
+    // return n most significant bits
+    bm.bitsAvail -= n;
+    return (bm.bits >> bm.bitsAvail) & (((uint32_t)1 << n) - 1);
+  }
+
+  inline uint32_t cBitReader::ue(void)
+  {
+    // read and decode an Exp-Golomb coded unsigned number
+    //
+    // bitstring             resulting number
+    //       1               0
+    //     0 1 x             1 ... 2
+    //   0 0 1 x y           3 ... 6
+    // 0 0 0 1 x y z         7 ... 14
+    // ...
+    int LeadingZeroBits = 0;
+    while (ReadBits(1) == 0)
+          LeadingZeroBits++;
+    if (LeadingZeroBits == 0)
+       return 0;
+    if (LeadingZeroBits >= 32)
+       throw new cException("ERROR: H264::cBitReader::ue(): overflow");
+    return ((uint32_t)1 << LeadingZeroBits) - 1 + ReadBits(LeadingZeroBits);
+  }
+
+  inline int32_t cBitReader::se(void)
+  {
+    // read and decode an Exp-Golomb coded signed number
+    //
+    // unsigned value       resulting signed value
+    // 0                     0
+    // 1                    +1
+    // 2                    -1
+    // 3                    +2
+    // 4                    -2
+    // ...
+    uint32_t r = ue();
+    if (r > 0xFFFFFFFE)
+       throw new cException("ERROR: H264::cBitReader::se(): overflow");
+    return (1 - 2 * (r & 1)) * ((r + 1) / 2);
+  }
+
+  // --- cPictureTiming ------------------------------------------------------
+
+  class cPictureTiming {
+  private:
+    friend class cContext;
+    bool defined;
+  public:
+    cPictureTiming(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) const { return defined; }
+    uint32_t pic_struct;
+  };
+
+  // --- cSequenceParameterSet -----------------------------------------------
+
+  class cSequenceParameterSet {
+  private:
+    friend class cContext;
+    bool defined;
+    uint32_t log2MaxFrameNum;
+    uint32_t log2MaxPicOrderCntLsb;
+    uint32_t cpbRemovalDelayLength;
+    uint32_t dpbOutputDelayLength;
+  public:
+    cSequenceParameterSet(void);
+    bool Defined(void) { return defined; }
+    void log2_max_frame_num_minus4(uint32_t Value) { log2MaxFrameNum = Value + 4; }
+    uint32_t log2_max_frame_num_minus4(void) const { return log2MaxFrameNum - 4; }
+    uint32_t log2_max_frame_num(void) const { return log2MaxFrameNum; }
+    void log2_max_pic_order_cnt_lsb_minus4(uint32_t Value) { log2MaxPicOrderCntLsb = Value + 4; }
+    uint32_t log2_max_pic_order_cnt_lsb_minus4(void) const { return log2MaxPicOrderCntLsb - 4; }
+    uint32_t log2_max_pic_order_cnt_lsb(void) const { return log2MaxPicOrderCntLsb; }
+    void cpb_removal_delay_length_minus1(uint32_t Value) { cpbRemovalDelayLength = Value + 1; }
+    uint32_t cpb_removal_delay_length_minus1(void) const { return cpbRemovalDelayLength - 1; }
+    uint32_t cpb_removal_delay_length(void) const { return cpbRemovalDelayLength; }
+    void dpb_output_delay_length_minus1(uint32_t Value) { dpbOutputDelayLength = Value + 1; }
+    uint32_t dpb_output_delay_length_minus1(void) const { return dpbOutputDelayLength - 1; }
+    uint32_t dpb_output_delay_length(void) const { return dpbOutputDelayLength; }
+    uint32_t seq_parameter_set_id;
+    uint32_t pic_order_cnt_type;
+    uint32_t delta_pic_order_always_zero_flag;
+    uint32_t frame_mbs_only_flag;
+    uint32_t timing_info_present_flag;
+    uint32_t num_units_in_tick;
+    uint32_t time_scale;
+    uint32_t fixed_frame_rate_flag;
+    uint32_t nal_hrd_parameters_present_flag;
+    uint32_t vcl_hrd_parameters_present_flag;
+    uint32_t pic_struct_present_flag;
+    cPictureTiming pic_timing_sei;
+  };
+
+  inline cSequenceParameterSet::cSequenceParameterSet(void)
+  {
+    memset(this, 0, sizeof (*this));
+    log2_max_frame_num_minus4(0);
+    log2_max_pic_order_cnt_lsb_minus4(0);
+    cpb_removal_delay_length_minus1(23);
+    dpb_output_delay_length_minus1(23);
+  }
+
+  // --- cPictureParameterSet ------------------------------------------------
+
+  class cPictureParameterSet {
+  private:
+    friend class cContext;
+    bool defined;
+  public:
+    cPictureParameterSet(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) { return defined; }
+    uint32_t pic_parameter_set_id;
+    uint32_t seq_parameter_set_id;
+    uint32_t pic_order_present_flag;
+  };
+
+  // --- cSliceHeader --------------------------------------------------------
+
+  class cSliceHeader {
+  private:
+    friend class cContext;
+    bool defined;
+    bool isFirstSliceOfCurrentAccessUnit;
+    uint32_t picOrderCntType;
+    uint32_t nalRefIdc;
+    uint32_t nalUnitType;
+  public:
+    cSliceHeader(void) { memset(this, 0, sizeof (*this)); }
+    bool Defined(void) const { return defined; }
+    bool IsFirstSliceOfCurrentAccessUnit(void) const { return isFirstSliceOfCurrentAccessUnit; }
+    void nal_ref_idc(uint32_t Value) { nalRefIdc = Value; }
+    uint32_t nal_ref_idc(void) const { return nalRefIdc; }
+    void nal_unit_type(uint32_t Value) { nalUnitType = Value; }
+    uint32_t nal_unit_type(void) const { return nalUnitType; }
+    uint32_t slice_type;
+    uint32_t pic_parameter_set_id;
+    uint32_t frame_num;
+    uint32_t field_pic_flag;
+    uint32_t bottom_field_flag;
+    uint32_t idr_pic_id;
+    uint32_t pic_order_cnt_lsb;
+    int32_t delta_pic_order_cnt_bottom;
+    int32_t delta_pic_order_cnt[2];
+    enum eAccessUnitType {
+      Frame = 0,
+      TopField,
+      BottomField
+      };
+    eAccessUnitType GetAccessUnitType() const { return (eAccessUnitType)(field_pic_flag + bottom_field_flag); }
+  };
+
+  // --- cContext ------------------------------------------------------------
+
+  class cContext {
+  private:
+    cSequenceParameterSet spsStore[32];
+    cPictureParameterSet ppsStore[256];
+    cSequenceParameterSet *sps; // active Sequence Parameter Set
+    cPictureParameterSet *pps; // active Picture Parameter Set
+    cSliceHeader sh;
+  public:
+    cContext(void) { sps = 0; pps = 0; }
+    void Define(cSequenceParameterSet &SPS);
+    void Define(cPictureParameterSet &PPS);
+    void Define(cSliceHeader &SH);
+    void Define(cPictureTiming &PT);
+    void ActivateSPS(uint32_t ID);
+    void ActivatePPS(uint32_t ID);
+    const cSequenceParameterSet *ActiveSPS(void) const { return sps; }
+    const cPictureParameterSet *ActivePPS(void) const { return pps; }
+    const cSliceHeader *CurrentSlice(void) const { return sh.Defined() ? &sh : 0; }
+    int GetFramesPerSec(void) const;
+  };
+
+  inline void cContext::ActivateSPS(uint32_t ID)
+  {
+    if (ID >= (sizeof (spsStore) / sizeof (*spsStore)))
+       throw new cException("ERROR: H264::cContext::ActivateSPS(): id out of range");
+    if (!spsStore[ID].Defined())
+       throw new cException("ERROR: H264::cContext::ActivateSPS(): requested SPS is undefined");
+    sps = &spsStore[ID];
+  }
+
+  inline void cContext::ActivatePPS(uint32_t ID)
+  {
+    if (ID >= (sizeof (ppsStore) / sizeof (*ppsStore)))
+       throw new cException("ERROR: H264::cContext::ActivatePPS(): id out of range");
+    if (!ppsStore[ID].Defined())
+       throw new cException("ERROR: H264::cContext::ActivatePPS(): requested PPS is undefined");
+    pps = &ppsStore[ID];
+    ActivateSPS(pps->seq_parameter_set_id);
+  }
+
+  inline void cContext::Define(cSequenceParameterSet &SPS)
+  {
+    if (SPS.seq_parameter_set_id >= (sizeof (spsStore) / sizeof (*spsStore)))
+       throw new cException("ERROR: H264::cContext::DefineSPS(): id out of range");
+    SPS.defined = true;
+    spsStore[SPS.seq_parameter_set_id] = SPS;
+  }
+
+  inline void cContext::Define(cPictureParameterSet &PPS)
+  {
+    if (PPS.pic_parameter_set_id >= (sizeof (ppsStore) / sizeof (*ppsStore)))
+       throw new cException("ERROR: H264::cContext::DefinePPS(): id out of range");
+    PPS.defined = true;
+    ppsStore[PPS.pic_parameter_set_id] = PPS;
+  }
+
+  inline void cContext::Define(cSliceHeader &SH)
+  {
+    SH.defined = true;
+    SH.picOrderCntType = ActiveSPS()->pic_order_cnt_type;
+
+    // ITU-T Rec. H.264 (03/2005): 7.4.1.2.4
+    SH.isFirstSliceOfCurrentAccessUnit = !sh.Defined()
+      || (sh.frame_num                  != SH.frame_num)
+      || (sh.pic_parameter_set_id       != SH.pic_parameter_set_id)
+      || (sh.field_pic_flag             != SH.field_pic_flag)
+      || (sh.bottom_field_flag          != SH.bottom_field_flag)
+      || (sh.nalRefIdc                  != SH.nalRefIdc
+      && (sh.nalRefIdc == 0             || SH.nalRefIdc == 0))
+      || (sh.picOrderCntType == 0       && SH.picOrderCntType == 0
+      && (sh.pic_order_cnt_lsb          != SH.pic_order_cnt_lsb
+      ||  sh.delta_pic_order_cnt_bottom != SH.delta_pic_order_cnt_bottom))
+      || (sh.picOrderCntType == 1       && SH.picOrderCntType == 1
+      && (sh.delta_pic_order_cnt[0]     != SH.delta_pic_order_cnt[0]
+      ||  sh.delta_pic_order_cnt[1]     != SH.delta_pic_order_cnt[1]))
+      || (sh.nalUnitType                != SH.nalUnitType
+      && (sh.nalUnitType == 5           || SH.nalUnitType == 5))
+      || (sh.nalUnitType == 5           && SH.nalUnitType == 5
+      &&  sh.idr_pic_id                 != SH.idr_pic_id);
+        
+    sh = SH;
+  }
+
+  inline void cContext::Define(cPictureTiming &PT)
+  {
+    PT.defined = true;
+    ((cSequenceParameterSet *)ActiveSPS())->pic_timing_sei = PT;
+  }
+
+  // --- cSimpleBuffer -------------------------------------------------------
+
+  class cSimpleBuffer {
+  private:
+    uchar *data;
+    int size;
+    int avail;
+    int gotten;
+  public:
+    cSimpleBuffer(int Size);
+    ~cSimpleBuffer();
+    int Size(void) { return size; }
+    int Available(void) { return avail; }
+    int Free(void) { return size - avail; }
+    int Put(const uchar *Data, int Count);
+    uchar *Get(int &Count);
+    void Del(int Count);
+    void Clear(void);
+  };
+
+  // --- cParser -------------------------------------------------------------
+
+  class cParser {
+  private:
+    bool syncing;
+    bool omitPicTiming;
+    cContext context;
+    cSimpleBuffer nalUnitDataBuffer;
+    void hrd_parameters(cSequenceParameterSet &SPS, cBitReader &br);
+    void ParseSequenceParameterSet(uint8_t *Data, int Count);
+    void ParsePictureParameterSet(uint8_t *Data, int Count);
+    void ParseSlice(uint8_t *Data, int Count);
+    void reserved_sei_message(uint32_t payloadSize, cBitReader &br);
+    void pic_timing(uint32_t payloadSize, cBitReader &br);
+    void buffering_period(uint32_t payloadSize, cBitReader &br);
+    void sei_payload(uint32_t payloadType, uint32_t payloadSize, cBitReader &br);
+    void sei_message(cBitReader &br);
+    void ParseSEI(uint8_t *Data, int Count);
+  public:
+    cParser(bool OmitPicTiming = true);
+    const cContext &Context(void) const { return context; }
+    void PutNalUnitData(const uchar *Data, int Count);
+    void Reset(void);
+    void Process(void);
+  };
+}
+
+#endif // __H264PARSER_H
+
Common subdirectories: vdr-1.4.7/libsi and vdr-1.4.7.h264/libsi
diff -Xorig.lst -Nu vdr-1.4.7/Makefile vdr-1.4.7.h264/Makefile
--- vdr-1.4.7/Makefile	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/Makefile	2007-11-16 21:14:23.000000000 +0200
@@ -37,7 +37,7 @@
        lirc.o menu.o menuitems.o nit.o osdbase.o osd.o pat.o player.o plugin.o rcu.o\
        receiver.o recorder.o recording.o remote.o remux.o ringbuffer.o sdt.o sections.o\
        skinclassic.o skins.o skinsttng.o sources.o spu.o status.o svdrp.o themes.o thread.o\
-       timers.o tools.o transfer.o vdr.o videodir.o submenu.o
+       timers.o tools.o transfer.o vdr.o videodir.o submenu.o h264parser.o
 
 OBJS += osdcontroller.o rcontroller.o dvbsub.o vdrttxtsubshooks.o
 
diff -Xorig.lst -Nu vdr-1.4.7/menu.c vdr-1.4.7.h264/menu.c
--- vdr-1.4.7/menu.c	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/menu.c	2007-11-16 21:36:37.000000000 +0200
@@ -253,6 +253,14 @@
 
   Clear();
 
+  static const char *VideoCodecValues[] = {
+     tr("unknown"),
+     tr("MPEG1/2"),
+     tr("H.264")
+     };
+ 
+
+
   // Parameters for all types of sources:
   strn0cpy(name, data.name, sizeof(name));
   strn0cpy(pluginParam, data.pluginParam, sizeof(pluginParam));
@@ -260,6 +268,7 @@
   Add(new cMenuEditSrcItem( tr("Source"),       &data.source));
   Add(new cMenuEditIntItem( tr("Frequency"),    &data.frequency));
   Add(new cMenuEditIntItem( tr("Vpid"),         &data.vpid,  0, 0x1FFF));
+  Add(new cMenuEditStraItem(tr("Video Coding"), &data.vcodec, sizeof(VideoCodecValues) / sizeof(*VideoCodecValues), VideoCodecValues));
   Add(new cMenuEditIntItem( tr("Ppid"),         &data.ppid,  0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Apid1"),        &data.apids[0], 0, 0x1FFF));
   Add(new cMenuEditIntItem( tr("Apid2"),        &data.apids[1], 0, 0x1FFF));
@@ -4172,7 +4181,7 @@
      const cChannel *ch = timer->Channel();
      cTtxtSubsRecorderBase *subsRecorder = cVDRTtxtsubsHookListener::Hook()->NewTtxtSubsRecorder(device, ch);
      int SubPids[3] = {DvbSubtitlesRecording.GetPidByChannel(device->DeviceNumber(), ch, 1), DvbSubtitlesRecording.GetPidByChannel(device->DeviceNumber(), ch, 2), 0};
-     recorder = new cRecorder(fileName, ch->Ca(), timer->Priority(), ch->Vpid(), ch->Apids(), ch->Dpids(), SubPids, subsRecorder);
+     recorder = new cRecorder(fileName, ch->Ca(), timer->Priority(), ch->Vpid(), ch->Apids(), ch->Dpids(), SubPids, subsRecorder, ch->VCodec());
      if (device->AttachReceiver(recorder)) {
         if (subsRecorder) subsRecorder->DeviceAttach();
         Recording.WriteInfo();
diff -Xorig.lst -Nu vdr-1.4.7/pat.c vdr-1.4.7.h264/pat.c
--- vdr-1.4.7/pat.c	2006-03-31 15:39:34.000000000 +0300
+++ vdr-1.4.7.h264/pat.c	2007-11-16 21:08:58.000000000 +0200
@@ -341,6 +341,12 @@
               case 1: // STREAMTYPE_11172_VIDEO
               case 2: // STREAMTYPE_13818_VIDEO
                       Vpid = stream.getPid();
+		      Channel->SetVCodec(VCODEC_MPEG2);
+                      break;
+ 	      case 0x19: // advanced codec HD digital television service
+ 	      case 0x1b: // ISO/IEC 14496-10 Video (MPEG-4 part 10/AVC, aka H.264)
+                      Vpid = stream.getPid();
+		      Channel->SetVCodec(VCODEC_H264);
                       break;
               case 3: // STREAMTYPE_11172_AUDIO
               case 4: // STREAMTYPE_13818_AUDIO
Yhteiset alihakemistot: vdr-1.4.7/PLUGINS ja vdr-1.4.7.h264/PLUGINS
diff -Xorig.lst -Nu vdr-1.4.7/recorder.c vdr-1.4.7.h264/recorder.c
--- vdr-1.4.7/recorder.c	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/recorder.c	2007-11-16 21:13:07.000000000 +0200
@@ -136,7 +136,7 @@
         }
 }
 
-cRecorder::cRecorder(const char *FileName, int Ca, int Priority, int VPid, const int *APids, const int *DPids, const int *SPids, cTtxtSubsRecorderBase *tsr)
+cRecorder::cRecorder(const char *FileName, int Ca, int Priority, int VPid, const int *APids, const int *DPids, const int *SPids, cTtxtSubsRecorderBase *tsr, int VCodec)
 :cReceiver(Ca, Priority, VPid, APids, Setup.RecordDolbyDigital ? DPids : NULL, SPids)
 ,cThread("recording")
 {
@@ -146,7 +146,7 @@
 
   ringBuffer = new cRingBufferLinear(RECORDERBUFSIZE, TS_SIZE * 2, true, "Recorder");
   ringBuffer->SetTimeouts(0, 100);
-  remux = new cRemux(VPid, APids, Setup.RecordDolbyDigital ? DPids : NULL, SPids, true);
+  remux = new cRemux(VPid, APids, Setup.RecordDolbyDigital ? DPids : NULL, SPids, true, VCodec);
   writer = new cFileWriter(FileName, remux, tsr);
 }
 
diff -Xorig.lst -Nu vdr-1.4.7/recorder.h vdr-1.4.7.h264/recorder.h
--- vdr-1.4.7/recorder.h	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/recorder.h	2007-11-16 21:08:58.000000000 +0200
@@ -29,7 +29,7 @@
   virtual void Receive(uchar *Data, int Length);
   virtual void Action(void);
 public:
-  cRecorder(const char *FileName, int Ca, int Priority, int VPid, const int *APids, const int *DPids, const int *SPids, cTtxtSubsRecorderBase *tsr);
+  cRecorder(const char *FileName, int Ca, int Priority, int VPid, const int *APids, const int *DPids, const int *SPids, cTtxtSubsRecorderBase *tsr, int VCodec=0);
                // Creates a new recorder that requires conditional access Ca, has
                // the given Priority and will record the given PIDs into the file FileName.
   virtual ~cRecorder();
diff -Xorig.lst -Nu vdr-1.4.7/remux.c vdr-1.4.7.h264/remux.c
--- vdr-1.4.7/remux.c	2007-11-16 21:19:46.000000000 +0200
+++ vdr-1.4.7.h264/remux.c	2007-11-16 21:08:58.000000000 +0200
@@ -19,6 +19,8 @@
 #include "channels.h"
 #include "thread.h"
 #include "tools.h"
+#include "recording.h"
+#include "h264parser.h"
 
 ePesHeader AnalyzePesHeader(const uchar *Data, int Count, int &PesPayloadOffset, bool *ContinuationHeader)
 {
@@ -100,8 +102,9 @@
   int suppressedLogMessages;
   bool LogAllowed(void);
   void DroppedData(const char *Reason, int Count) { LOG("%s (dropped %d bytes)", Reason, Count); }
+  virtual int Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
 public:
-  static int Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
+  static int PutAllOrNothing(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
   cRepacker(void);
   virtual ~cRepacker() {}
   virtual void Reset(void) { initiallySyncing = true; }
@@ -138,6 +141,11 @@
 
 int cRepacker::Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded)
 {
+  return PutAllOrNothing(ResultBuffer, Data, Count, CapacityNeeded);
+}
+
+int cRepacker::PutAllOrNothing(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded)
+{
   if (CapacityNeeded >= Count && ResultBuffer->Free() < CapacityNeeded) {
      esyslog("ERROR: possible result buffer overflow, dropped %d out of %d byte", CapacityNeeded, CapacityNeeded);
      return 0;
@@ -156,7 +164,7 @@
   int packetTodo;
   uchar fragmentData[6 + 65535 + 3];
   int fragmentLen;
-  uchar pesHeader[6 + 3 + 255 + 3];
+  uchar pesHeader[6 + 3 + 255 + 5 + 3]; // 5: H.264 AUD
   int pesHeaderLen;
   uchar pesHeaderBackup[6 + 3 + 255];
   int pesHeaderBackupLen;
@@ -164,7 +172,7 @@
   uint32_t localScanner;
   int localStart;
   bool PushOutPacket(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count);
-  virtual int QuerySnoopSize() { return 4; }
+  virtual int QuerySnoopSize(void) { return 4; }
   virtual void Reset(void);
   };
 
@@ -238,6 +246,132 @@
   return true;
 }
 
+// --- cAudGenerator ---------------------------------------------------------
+
+class cAudGenerator {
+private:
+  H264::cSimpleBuffer buffer;
+  int overflowByteCount;
+  H264::cSliceHeader::eAccessUnitType accessUnitType;
+  int sliceTypes;
+public:
+  cAudGenerator(void);
+  void CollectSliceType(const H264::cSliceHeader *SH);
+  int CollectData(const uchar *Data, int Count);
+  void Generate(cRingBufferLinear *const ResultBuffer);
+};
+
+cAudGenerator::cAudGenerator()
+  : buffer(MAXFRAMESIZE)
+{
+  overflowByteCount = 0;
+  accessUnitType = H264::cSliceHeader::Frame;
+  sliceTypes = 0;
+}
+
+int cAudGenerator::CollectData(const uchar *Data, int Count)
+{
+  // buffer frame data until AUD can be generated
+  int n = buffer.Put(Data, Count);
+  overflowByteCount += (Count - n);
+  // always report "success" as an error message will be shown in Generate()
+  return Count;
+}
+
+void cAudGenerator::CollectSliceType(const H264::cSliceHeader *SH)
+{
+  if (!SH)
+     return;
+  // remember type of current access unit 
+  accessUnitType = SH->GetAccessUnitType();
+  // translate slice_type into part of primary_pic_type and merge them
+  switch (SH->slice_type) {
+    case 2: // I
+    case 7: // I only => I 
+         sliceTypes |= 0x10000;
+         break;
+    case 0: // P
+    case 5: // P only => I, P
+         sliceTypes |= 0x11000;
+         break;
+    case 1: // B
+    case 6: // B only => I, P, B
+         sliceTypes |= 0x11100;
+         break;
+    case 4: // SI
+    case 9: // SI only => SI
+         sliceTypes |= 0x00010;
+         break;
+    case 3: // SP
+    case 8: // SP only => SI, SP
+         sliceTypes |= 0x00011;
+         break;
+    }
+}
+
+void cAudGenerator::Generate(cRingBufferLinear *const ResultBuffer)
+{
+  int primary_pic_type;
+  // translate the merged primary_pic_type parts into primary_pic_type
+  switch (sliceTypes) {
+    case 0x10000: // I
+         primary_pic_type = 0;
+         break;
+    case 0x11000: // I, P
+         primary_pic_type = 1;
+         break;
+    case 0x11100: // I, P, B
+         primary_pic_type = 2;
+         break;
+    case 0x00010: // SI
+         primary_pic_type = 3;
+         break;
+    case 0x00011: // SI, SP
+         primary_pic_type = 4;
+         break;
+    case 0x10010: // I, SI
+         primary_pic_type = 5;
+         break;
+    case 0x11011: // I, SI, P, SP
+    case 0x10011: // I, SI, SP
+    case 0x11010: // I, SI, P
+         primary_pic_type = 6;
+         break;
+    case 0x11111: // I, SI, P, SP, B
+    case 0x11110: // I, SI, P, B
+         primary_pic_type = 7;
+         break;
+    default:
+         primary_pic_type = -1; // frame without slices?
+    }
+  // drop an incorrect frame
+  if (primary_pic_type < 0)
+     esyslog("ERROR: cAudGenerator::Generate(): dropping frame without slices");
+  else {
+     // drop a partial frame
+     if (overflowByteCount > 0) 
+        esyslog("ERROR: cAudGenerator::Generate(): frame exceeds MAXFRAMESIZE bytes (required size: %d bytes), dropping frame", buffer.Size() + overflowByteCount);
+     else {
+        int Count;
+        uchar *Data = buffer.Get(Count);
+        int PesPayloadOffset = 0;
+        AnalyzePesHeader(Data, Count, PesPayloadOffset);
+        // enter primary_pic_type into AUD
+        Data[ PesPayloadOffset + 4 ] |= primary_pic_type << 5;
+        // mangle the "start code" to pass the information that this access unit is a
+        // bottom field to ScanVideoPacket() where this modification will be reverted.
+        if (accessUnitType == H264::cSliceHeader::BottomField)
+           Data[ PesPayloadOffset + 3 ] |= 0x80;
+        // store the buffered frame
+        cRepacker::PutAllOrNothing(ResultBuffer, Data, Count, Count);
+        }
+     }
+  // prepare for next run
+  buffer.Clear();
+  overflowByteCount = 0;
+  sliceTypes = 0;
+}
+
 // --- cVideoRepacker --------------------------------------------------------
 
 class cVideoRepacker : public cCommonRepacker {
@@ -248,6 +382,13 @@
     scanPicture
     };
   int state;
+  H264::cParser *h264Parser;
+  int sliceSeen;
+  bool audSeen;
+  cAudGenerator *audGenerator;
+  void CheckAudGeneration(bool SliceNalUnitType, bool SyncPoint, const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
+  void PushOutCurrentFrameAndStartNewPacket(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
+  void HandleNalUnit(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel, const uchar *&NalPayload);
   void HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel);
   inline bool ScanDataForStartCodeSlow(const uchar *const Data);
   inline bool ScanDataForStartCodeFast(const uchar *&Data, const uchar *Limit);
@@ -256,30 +397,155 @@
   inline bool ScanForEndOfPictureSlow(const uchar *&Data);
   inline bool ScanForEndOfPictureFast(const uchar *&Data, const uchar *Limit);
   inline bool ScanForEndOfPicture(const uchar *&Data, const uchar *Limit);
+  void CollectNalUnitData(const uchar *Data, int Count);
+protected:
+  virtual int Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded);
 public:
-  cVideoRepacker(void);
+  cVideoRepacker(bool H264);
+  ~cVideoRepacker();
   virtual void Reset(void);
   virtual void Repack(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count);
   virtual int BreakAt(const uchar *Data, int Count);
   };
 
-cVideoRepacker::cVideoRepacker(void)
+cVideoRepacker::cVideoRepacker(bool H264)
 {
+  h264Parser = (H264 ? new H264::cParser() : 0);
+  audGenerator = 0;
   Reset();
 }
 
+cVideoRepacker::~cVideoRepacker()
+{
+  delete h264Parser;
+  delete audGenerator;
+}
+
 void cVideoRepacker::Reset(void)
 {
   cCommonRepacker::Reset();
+  if (h264Parser)
+     h264Parser->Reset();
   scanner = 0xFFFFFFFF;
   state = syncing;
+  sliceSeen = -1;
+  audSeen = false;
+  delete audGenerator;
+  audGenerator = 0;
 }
 
-void cVideoRepacker::HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+int cVideoRepacker::Put(cRingBufferLinear *ResultBuffer, const uchar *Data, int Count, int CapacityNeeded)
 {
-  // synchronisation is detected some bytes after frame start.
-  const int SkippedBytesLimit = 4;
+  if (!audGenerator)
+     return cCommonRepacker::Put(ResultBuffer, Data, Count, CapacityNeeded);
+
+  return audGenerator->CollectData(Data, Count);
+}
+
+void cVideoRepacker::CollectNalUnitData(const uchar *Data, int Count)
+{
+  if (h264Parser)
+     h264Parser->PutNalUnitData(Data, Count);
+}
+
+void cVideoRepacker::HandleNalUnit(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel, const uchar *&NalPayload)
+{
+  // valid NAL units start with a zero bit
+  if (*Data & 0x80) {
+     LOG("cVideoRepacker: found invalid NAL unit: stream seems to be scrambled or not demultiplexed");
+     return;
+     }
+
+  // collect NAL unit's remaining data and process it 
+  CollectNalUnitData(NalPayload, Data - 3 - NalPayload);
+  h264Parser->Process();
+
+  // collect 0x00 0x00 0x01 for current NAL unit
+  static const uchar InitPayload[3] = { 0x00, 0x00, 0x01 };
+  CollectNalUnitData(InitPayload, sizeof (InitPayload));
+  NalPayload = Data;
+
+  // which kind of NAL unit have we got?
+  const int nal_unit_type = *Data & 0x1F;
+  switch (nal_unit_type) {
+    case 1: // coded slice of a non-IDR picture
+    case 2: // coded slice data partition A
+    case 5: // coded slice of an IDR picture
+         CheckAudGeneration(true, false, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+         break;
+    case 3: // coded slice data partition B
+    case 4: // coded slice data partition C
+    case 19: // coded slice of an auxiliary coded picture without partitioning
+         break;
+    case 6: // supplemental enhancement information (SEI)
+    case 7: // sequence parameter set
+    case 8: // picture parameter set
+    case 10: // end of sequence
+    case 11: // end of stream
+    case 13: // sequence parameter set extension
+         CheckAudGeneration(false, nal_unit_type == 7, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+         break;
+    case 12: // filler data
+         break;
+    case 14 ... 18: // reserved
+         CheckAudGeneration(false, false, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+    case 20 ... 23: // reserved
+         LOG("cVideoRepacker: found reserved NAL unit type: stream seems to be scrambled");
+         break;
+    case 0: // unspecified
+    case 24 ... 31: // unspecified
+         LOG("cVideoRepacker: found unspecified NAL unit type: stream seems to be scrambled");
+         break;
+    case 9: { // access unit delimiter
+         audSeen = true;
+         CheckAudGeneration(false, true, Data, ResultBuffer, Payload, StreamID, MpegLevel);
+         // mangle the "start code" to pass the information that the next access unit will be
+         // a bottom field to ScanVideoPacket() where this modification will be reverted.
+         const H264::cSliceHeader *SH = h264Parser->Context().CurrentSlice();
+         if (SH && SH->GetAccessUnitType() == H264::cSliceHeader::TopField)
+            *(uchar *)Data |= 0x80;
+         }
+         break;
+    }
+}
+
+void cVideoRepacker::CheckAudGeneration(bool SliceNalUnitType, bool SyncPoint, const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
+  // we cannot generate anything until we have reached the synchronisation point
+  if (sliceSeen < 0 && !SyncPoint)
+     return;
+  // detect transition from slice to non-slice NAL units
+  const bool WasSliceSeen = (sliceSeen != false);
+  const bool IsSliceSeen = SliceNalUnitType;
+  sliceSeen = IsSliceSeen;
+  // collect slice types for AUD generation
+  if (WasSliceSeen && audGenerator)
+     audGenerator->CollectSliceType(h264Parser->Context().CurrentSlice());
+  // handle access unit delimiter at the transition from slice to non-slice NAL units
+  if (WasSliceSeen && !IsSliceSeen) {
+     // an Access Unit Delimiter indicates that the current picture is done. So let's
+     // push out the current frame to start a new packet for the next picture.
+     PushOutCurrentFrameAndStartNewPacket(Data, ResultBuffer, Payload, StreamID, MpegLevel);
+     if (state == findPicture) {
+        // go on with scanning the picture data
+        state++;
+        }
+     // generate the AUD and push out the buffered frame
+     if (audGenerator) {
+        audGenerator->Generate(ResultBuffer);
+        if (audSeen) {
+           // we no longer need to generate AUDs as they are part of the stream
+           delete audGenerator;
+           audGenerator = 0;
+           }
+        }
+     else if (!audSeen) // we do need to generate AUDs
+        audGenerator = new cAudGenerator;
+     }
+}
 
+void cVideoRepacker::HandleStartCode(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
   // which kind of start code have we got?
   switch (*Data) {
     case 0xB9 ... 0xFF: // system start codes
@@ -298,65 +564,9 @@
     case 0xB3: // sequence header code
     case 0xB8: // group start code
     case 0x00: // picture start code
-         if (state == scanPicture) {
-            // the above start codes indicate that the current picture is done. So
-            // push out the packet to start a new packet for the next picuture. If
-            // the byte count get's negative then the current buffer ends in a
-            // partitial start code that must be stripped off, as it shall be put
-            // in the next packet.
-            PushOutPacket(ResultBuffer, Payload, Data - 3 - Payload);
-            // go on with syncing to the next picture
-            state = syncing;
-            }
-         if (state == syncing) {
-            if (initiallySyncing) // omit report for the typical initial case
-               initiallySyncing = false;
-            else if (skippedBytes > SkippedBytesLimit) // report that syncing dropped some bytes
-               LOG("cVideoRepacker: skipped %d bytes to sync on next picture", skippedBytes - SkippedBytesLimit);
-            skippedBytes = 0;
-            // if there is a PES header available, then use it ...
-            if (pesHeaderBackupLen > 0) {
-               // ISO 13818-1 says:
-               // In the case of video, if a PTS is present in a PES packet header
-               // it shall refer to the access unit containing the first picture start
-               // code that commences in this PES packet. A picture start code commences
-               // in PES packet if the first byte of the picture start code is present
-               // in the PES packet.
-               memcpy(pesHeader, pesHeaderBackup, pesHeaderBackupLen);
-               pesHeaderLen = pesHeaderBackupLen;
-               pesHeaderBackupLen = 0;
-               }
-            else {
-               // ... otherwise create a continuation PES header
-               pesHeaderLen = 0;
-               pesHeader[pesHeaderLen++] = 0x00;
-               pesHeader[pesHeaderLen++] = 0x00;
-               pesHeader[pesHeaderLen++] = 0x01;
-               pesHeader[pesHeaderLen++] = StreamID; // video stream ID
-               pesHeader[pesHeaderLen++] = 0x00; // length still unknown
-               pesHeader[pesHeaderLen++] = 0x00; // length still unknown
-
-               if (MpegLevel == phMPEG2) {
-                  pesHeader[pesHeaderLen++] = 0x80;
-                  pesHeader[pesHeaderLen++] = 0x00;
-                  pesHeader[pesHeaderLen++] = 0x00;
-                  }
-               else
-                  pesHeader[pesHeaderLen++] = 0x0F;
-               }
-            // append the first three bytes of the start code
-            pesHeader[pesHeaderLen++] = 0x00;
-            pesHeader[pesHeaderLen++] = 0x00;
-            pesHeader[pesHeaderLen++] = 0x01;
-            // the next packet's payload will begin with the fourth byte of
-            // the start code (= the actual code)
-            Payload = Data;
-            // as there is no length information available, assume the
-            // maximum we can hold in one PES packet
-            packetTodo = maxPacketSize - pesHeaderLen;
-            // go on with finding the picture data
-            state++;
-            }
+         // the above start codes indicate that the current picture is done. So let's
+         // push out the current frame to start a new packet for the next picture.
+         PushOutCurrentFrameAndStartNewPacket(Data, ResultBuffer, Payload, StreamID, MpegLevel);
          break;
     case 0x01 ... 0xAF: // slice start codes
          if (state == findPicture) {
@@ -367,6 +577,81 @@
     }
 }
 
+void cVideoRepacker::PushOutCurrentFrameAndStartNewPacket(const uchar *const Data, cRingBufferLinear *const ResultBuffer, const uchar *&Payload, const uchar StreamID, const ePesHeader MpegLevel)
+{
+  // synchronisation is detected some bytes after frame start.
+  const int SkippedBytesLimit = 4;
+
+  if (state == scanPicture) {
+     // picture data has been found so let's push out the current frame.
+     // If the byte count gets negative then the current buffer ends in a
+     // partial start code that must be stripped off, as it shall be put
+     // in the next packet.
+     PushOutPacket(ResultBuffer, Payload, Data - 3 - Payload);
+     // go on with syncing to the next picture
+     state = syncing;
+     }
+  // when already synced to a picture, just go on collecting data 
+  if (state != syncing)
+     return;
+  // we're synced to a picture so prepare a new packet
+  if (initiallySyncing) // omit report for the typical initial case
+     initiallySyncing = false;
+  else if (skippedBytes > SkippedBytesLimit) // report that syncing dropped some bytes
+     LOG("cVideoRepacker: skipped %d bytes to sync on next picture", skippedBytes - SkippedBytesLimit);
+  skippedBytes = 0;
+  // if there is a PES header available, then use it ...
+  if (pesHeaderBackupLen > 0) {
+     // ISO 13818-1 says:
+     // In the case of video, if a PTS is present in a PES packet header
+     // it shall refer to the access unit containing the first picture start
+     // code that commences in this PES packet. A picture start code commences
+     // in PES packet if the first byte of the picture start code is present
+     // in the PES packet.
+     memcpy(pesHeader, pesHeaderBackup, pesHeaderBackupLen);
+     pesHeaderLen = pesHeaderBackupLen;
+     pesHeaderBackupLen = 0;
+     }
+  else {
+     // ... otherwise create a continuation PES header
+     pesHeaderLen = 0;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x01;
+     pesHeader[pesHeaderLen++] = StreamID; // video stream ID
+     pesHeader[pesHeaderLen++] = 0x00; // length still unknown
+     pesHeader[pesHeaderLen++] = 0x00; // length still unknown
+
+     if (MpegLevel == phMPEG2) {
+        pesHeader[pesHeaderLen++] = 0x80;
+        pesHeader[pesHeaderLen++] = 0x00;
+        pesHeader[pesHeaderLen++] = 0x00;
+        }
+     else
+        pesHeader[pesHeaderLen++] = 0x0F;
+     }
+  // add an AUD in H.264 mode when not present in stream
+  if (h264Parser && !audSeen) {
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x00;
+     pesHeader[pesHeaderLen++] = 0x01;
+     pesHeader[pesHeaderLen++] = 0x09; // access unit delimiter
+     pesHeader[pesHeaderLen++] = 0x10; // will be filled later
+     }
+  // append the first three bytes of the start code
+  pesHeader[pesHeaderLen++] = 0x00;
+  pesHeader[pesHeaderLen++] = 0x00;
+  pesHeader[pesHeaderLen++] = 0x01;
+  // the next packet's payload will begin with the fourth byte of
+  // the start code (= the actual code)
+  Payload = Data;
+  // as there is no length information available, assume the
+  // maximum we can hold in one PES packet
+  packetTodo = maxPacketSize - pesHeaderLen;
+  // go on with finding the picture data
+  state++;
+}
+
 bool cVideoRepacker::ScanDataForStartCodeSlow(const uchar *const Data)
 {
   scanner <<= 8;
@@ -458,14 +743,19 @@
   const uchar *data = Data + done;
   // remember start of the data
   const uchar *payload = data;
+  const uchar *NalPayload = payload;
 
   while (todo > 0) {
         // collect number of skipped bytes while syncing
         if (state <= syncing)
            skippedBytes++;
         // did we reach a start code?
-        if (ScanDataForStartCode(data, done, todo))
-           HandleStartCode(data, ResultBuffer, payload, Data[3], mpegLevel);
+        if (ScanDataForStartCode(data, done, todo)) {
+           if (h264Parser)
+              HandleNalUnit(data, ResultBuffer, payload, Data[3], mpegLevel, NalPayload);
+           else
+              HandleStartCode(data, ResultBuffer, payload, Data[3], mpegLevel);
+           }
         // move on
         data++;
         done++;
@@ -568,6 +858,8 @@
         fragmentLen += bite;
         }
      }
+  // always collect remaining NAL unit data (may be needed for syncing)
+  CollectNalUnitData(NalPayload, data - NalPayload);
   // report that syncing dropped some bytes
   if (skippedBytes > SkippedBytesLimit) {
      if (!initiallySyncing) // omit report for the typical initial case
@@ -581,13 +873,22 @@
   localScanner <<= 8;
   localScanner |= *Data++;
   // check start codes which follow picture data
-  switch (localScanner) {
-    case 0x00000100: // picture start code
-    case 0x000001B8: // group start code
-    case 0x000001B3: // sequence header code
-    case 0x000001B7: // sequence end code
-         return true;
-    }
+  if (h264Parser) {
+     int nal_unit_type = localScanner & 0x1F;
+     switch (nal_unit_type) {
+       case 9: // access unit delimiter
+            return true;
+       }
+     }
+  else {
+     switch (localScanner) {
+       case 0x00000100: // picture start code
+       case 0x000001B8: // group start code
+       case 0x000001B3: // sequence header code
+       case 0x000001B7: // sequence end code
+            return true;
+       }
+     }
   return false;
 }
 
@@ -601,15 +902,27 @@
         else {
            localScanner = 0x00000100 | *++Data;
            // check start codes which follow picture data
-           switch (localScanner) {
-             case 0x00000100: // picture start code
-             case 0x000001B8: // group start code
-             case 0x000001B3: // sequence header code
-             case 0x000001B7: // sequence end code
-                  Data++;
-                  return true;
-             default:
-                  Data += 3;
+           if (h264Parser) {
+              int nal_unit_type = localScanner & 0x1F;
+              switch (nal_unit_type) {
+                case 9: // access unit delimiter
+                     Data++;
+                     return true;
+                default:
+                     Data += 3;
+                }
+              }
+           else {
+              switch (localScanner) {
+                case 0x00000100: // picture start code
+                case 0x000001B8: // group start code
+                case 0x000001B3: // sequence header code
+                case 0x000001B7: // sequence end code
+                     Data++;
+                     return true;
+                default:
+                     Data += 3;
+                }
              }
            }
         }
@@ -1529,7 +1842,7 @@
   if (repacker)
      repacker->Repack(resultBuffer, Data, Count);
   else
-     cRepacker::Put(resultBuffer, Data, Count, Count);
+     cRepacker::PutAllOrNothing(resultBuffer, Data, Count, Count);
 }
 
 void cTS2PES::reset_ipack(void)
@@ -1896,8 +2209,9 @@
 
 #define RESULTBUFFERSIZE KILOBYTE(256)
 
-cRemux::cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure)
+cRemux::cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure, int VCodec)
 {
+  h264 = (VCodec == VCODEC_H264);
   exitOnFailure = ExitOnFailure;
   isRadio = VPid == 0 || VPid == 1 || VPid == 0x1FFF;
   numUPTerrors = 0;
@@ -1910,7 +2224,7 @@
   if (VPid)
 #define TEST_cVideoRepacker
 #ifdef TEST_cVideoRepacker
-     ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0, 0x00, new cVideoRepacker);
+     ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0, 0x00, new cVideoRepacker(h264));
 #else
      ts2pes[numTracks++] = new cTS2PES(VPid, resultBuffer, IPACKS, 0xE0);
 #endif
@@ -1960,6 +2274,23 @@
   return -1;
 }
 
+bool cRemux::IsFrameH264(const uchar *Data, int Length)
+{
+  int PesPayloadOffset;
+  const uchar *limit = Data + Length;
+  if (AnalyzePesHeader(Data, Length, PesPayloadOffset) <= phInvalid)
+     return false; // neither MPEG1 nor MPEG2
+
+  Data += PesPayloadOffset + 3; // move to video payload and skip 00 00 01
+  if (Data < limit) {
+     // cVideoRepacker ensures that in case of H264 we will see an access unit delimiter here
+     if (0x01 == Data[-1] && 9 == Data[0] && 0x00 == Data[-2] && 0x00 == Data[-3])
+        return true;
+     }
+
+  return false;
+}
+
 int cRemux::ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType)
 {
   // Scans the video packet starting at Offset and returns its length.
@@ -1978,23 +2309,67 @@
            if (p[-2] || p[-1] || p[0] != 0x01)
               pLimit = 0; // skip scanning: packet doesn't start with 0x000001
            else {
-              switch (p[1]) {
-                case SC_SEQUENCE:
-                case SC_GROUP:
-                case SC_PICTURE:
-                     break;
-                default: // skip scanning: packet doesn't start a new sequence, group or picture
-                     pLimit = 0;
-                }
+              if (h264) {
+                 int nal_unit_type = p[1] & 0x1F;
+                 switch (nal_unit_type) {
+                   case 9: // access unit delimiter
+                        // when the MSB in p[1] is set (which violates H.264) then this is a hint
+                        // from cVideoRepacker::HandleNalUnit() that this bottom field shall not
+                        // be reported as picture.
+                        if (p[1] & 0x80)
+                           ((uchar *)p)[1] &= ~0x80; // revert the hint and fall through
+                        else
+                           break;
+                   default: // skip scanning: packet doesn't start a new picture
+                        pLimit = 0;
+                   }
+                 }
+              else {
+                 switch (p[1]) {
+                   case SC_SEQUENCE:
+                   case SC_GROUP:
+                   case SC_PICTURE:
+                        break;
+                   default: // skip scanning: packet doesn't start a new sequence, group or picture
+                        pLimit = 0;
+                   }
+                 }
               }
            }
 #endif
         while (p < pLimit && (p = (const uchar *)memchr(p, 0x01, pLimit - p))) {
               if (!p[-2] && !p[-1]) { // found 0x000001
-                 switch (p[1]) {
-                   case SC_PICTURE: PictureType = (p[3] >> 3) & 0x07;
-                                    return Length;
-                   }
+                 if (h264) {
+                    int nal_unit_type = p[1] & 0x1F;
+                    switch (nal_unit_type) {
+                      case 9: { // access unit delimiter
+                              int primary_pic_type = p[2] >> 5;
+                              switch (primary_pic_type) {
+                                case 0: // I
+                                case 3: // SI
+                                case 5: // I, SI
+                                     PictureType = I_FRAME;
+                                     break;
+                                case 1: // I, P
+                                case 4: // SI, SP
+                                case 6: // I, SI, P, SP
+                                     PictureType = P_FRAME;
+                                     break;
+                                case 2: // I, P, B
+                                case 7: // I, SI, P, SP, B
+                                     PictureType = B_FRAME;
+                                     break;
+                                }
+                              return Length;
+                              }
+                      }
+                    }
+                 else {
+                    switch (p[1]) {
+                      case SC_PICTURE: PictureType = (p[3] >> 3) & 0x07;
+                                       return Length;
+                      }
+                    }
                  p += 4; // continue scanning after 0x01ssxxyy
                  }
               else
diff -Xorig.lst -Nu vdr-1.4.7/remux.h vdr-1.4.7.h264/remux.h
--- vdr-1.4.7/remux.h	2006-03-25 14:27:30.000000000 +0200
+++ vdr-1.4.7.h264/remux.h	2007-11-16 21:08:58.000000000 +0200
@@ -46,8 +46,10 @@
   cRingBufferLinear *resultBuffer;
   int resultSkipped;
   int GetPid(const uchar *Data);
+  int ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType);
 public:
-  cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure = false);
+  bool h264;
+  cRemux(int VPid, const int *APids, const int *DPids, const int *SPids, bool ExitOnFailure = false, int vcodec=0);
        ///< Creates a new remuxer for the given PIDs. VPid is the video PID, while
        ///< APids, DPids and SPids are pointers to zero terminated lists of audio,
        ///< dolby and subtitle PIDs (the pointers may be NULL if there is no such
@@ -78,7 +80,7 @@
        ///< settings as they are.
   static void SetBrokenLink(uchar *Data, int Length);
   static int GetPacketLength(const uchar *Data, int Count, int Offset);
-  static int ScanVideoPacket(const uchar *Data, int Count, int Offset, uchar &PictureType);
+  static bool IsFrameH264(const uchar *Data, int Length);
   };
 
 #endif // __REMUX_H
diff -Xorig.lst -Nu vdr-1.4.7/sdt.c vdr-1.4.7.h264/sdt.c
--- vdr-1.4.7/sdt.c	2006-04-15 17:12:21.000000000 +0300
+++ vdr-1.4.7.h264/sdt.c	2007-11-16 21:08:58.000000000 +0200
@@ -55,6 +55,7 @@
                    case 0x02: // digital radio sound service
                    case 0x04: // NVOD reference service
                    case 0x05: // NVOD time-shifted service
+                   case 0x19: // digital HD television service
                         {
                         char NameBuf[1024];
                         char ShortNameBuf[1024];
Common subdirectories: vdr-1.4.7/symbols and vdr-1.4.7.h264/symbols
diff -Xorig.lst -Nu vdr-1.4.7/transfer.c vdr-1.4.7.h264/transfer.c
--- vdr-1.4.7/transfer.c	2006-01-29 19:24:39.000000000 +0200
+++ vdr-1.4.7.h264/transfer.c	2007-11-16 21:08:58.000000000 +0200
@@ -14,12 +14,12 @@
 
 // --- cTransfer -------------------------------------------------------------
 
-cTransfer::cTransfer(int VPid, const int *APids, const int *DPids, const int *SPids)
+cTransfer::cTransfer(int VPid, const int *APids, const int *DPids, const int *SPids, int VCodec)
 :cReceiver(0, -1, VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids)
 ,cThread("transfer")
 {
   ringBuffer = new cRingBufferLinear(TRANSFERBUFSIZE, TS_SIZE * 2, true, "Transfer");
-  remux = new cRemux(VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids);
+  remux = new cRemux(VPid, APids, Setup.UseDolbyDigital ? DPids : NULL, SPids, false, VCodec);
 }
 
 cTransfer::~cTransfer()
@@ -110,8 +110,8 @@
 
 cDevice *cTransferControl::receiverDevice = NULL;
 
-cTransferControl::cTransferControl(cDevice *ReceiverDevice, int VPid, const int *APids, const int *DPids, const int *SPids)
-:cControl(transfer = new cTransfer(VPid, APids, DPids, SPids), true)
+cTransferControl::cTransferControl(cDevice *ReceiverDevice, int VPid, const int *APids, const int *DPids, const int *SPids, int VCodec)
+:cControl(transfer = new cTransfer(VPid, APids, DPids, SPids, VCodec), true)
 {
   ReceiverDevice->AttachReceiver(transfer);
   receiverDevice = ReceiverDevice;
diff -Xorig.lst -Nu vdr-1.4.7/transfer.h vdr-1.4.7.h264/transfer.h
--- vdr-1.4.7/transfer.h	2006-01-29 19:24:43.000000000 +0200
+++ vdr-1.4.7.h264/transfer.h	2007-11-16 21:08:58.000000000 +0200
@@ -25,7 +25,7 @@
   virtual void Receive(uchar *Data, int Length);
   virtual void Action(void);
 public:
-  cTransfer(int VPid, const int *APids, const int *DPids, const int *SPids);
+  cTransfer(int VPid, const int *APids, const int *DPids, const int *SPids, int VCodec = 0);
   virtual ~cTransfer();
   };
 
@@ -34,7 +34,7 @@
   cTransfer *transfer;
   static cDevice *receiverDevice;
 public:
-  cTransferControl(cDevice *ReceiverDevice, int VPid, const int *APids, const int *DPids, const int *SPids);
+  cTransferControl(cDevice *ReceiverDevice, int VPid, const int *APids, const int *DPids, const int *SPids, int VCodec = 0);
   ~cTransferControl();
   virtual void Hide(void) {}
   static cDevice *ReceiverDevice(void) { return receiverDevice; }