Showing preview only (3,900K chars total). Download the full file or copy to clipboard to get everything.
Repository: spex66/RTSP-Camera-for-Android
Branch: master
Commit: ce20a50dd019
Files: 388
Total size: 3.6 MB
Directory structure:
gitextract_nchk1d3j/
├── README.md
├── RtspCamera/
│ ├── .classpath
│ ├── .gitignore
│ ├── .project
│ ├── .settings/
│ │ └── org.jboss.ide.eclipse.as.core.prefs
│ ├── AndroidManifest.xml
│ ├── docs/
│ │ └── wire-udp4000-h264-with-inband-sps.pps-nativeencoder.pcap
│ ├── gpl.txt
│ ├── jni/
│ │ ├── Android.mk
│ │ ├── Application.mk
│ │ ├── avc_h264/
│ │ │ ├── Android.mk
│ │ │ ├── common/
│ │ │ │ ├── include/
│ │ │ │ │ ├── avcapi_common.h
│ │ │ │ │ ├── avcint_common.h
│ │ │ │ │ └── avclib_common.h
│ │ │ │ └── src/
│ │ │ │ ├── deblock.cpp
│ │ │ │ ├── dpb.cpp
│ │ │ │ ├── fmo.cpp
│ │ │ │ ├── mb_access.cpp
│ │ │ │ └── reflist.cpp
│ │ │ ├── dec/
│ │ │ │ ├── Android.mk
│ │ │ │ ├── include/
│ │ │ │ │ ├── avcdec_api.h
│ │ │ │ │ ├── pvavcdecoder.h
│ │ │ │ │ ├── pvavcdecoder_factory.h
│ │ │ │ │ └── pvavcdecoderinterface.h
│ │ │ │ └── src/
│ │ │ │ ├── 3GPVideoParser.cpp
│ │ │ │ ├── 3GPVideoParser.h
│ │ │ │ ├── NativeH264Decoder.cpp
│ │ │ │ ├── NativeH264Decoder.h
│ │ │ │ ├── avc_bitstream.cpp
│ │ │ │ ├── avcdec_api.cpp
│ │ │ │ ├── avcdec_bitstream.h
│ │ │ │ ├── avcdec_int.h
│ │ │ │ ├── avcdec_lib.h
│ │ │ │ ├── header.cpp
│ │ │ │ ├── itrans.cpp
│ │ │ │ ├── pred_inter.cpp
│ │ │ │ ├── pred_intra.cpp
│ │ │ │ ├── pvavcdecoder.cpp
│ │ │ │ ├── pvavcdecoder_factory.cpp
│ │ │ │ ├── residual.cpp
│ │ │ │ ├── slice.cpp
│ │ │ │ ├── vlc.cpp
│ │ │ │ ├── yuv2rgb.cpp
│ │ │ │ └── yuv2rgb.h
│ │ │ ├── enc/
│ │ │ │ ├── Android.mk
│ │ │ │ ├── include/
│ │ │ │ │ ├── pvavcencoder.h
│ │ │ │ │ ├── pvavcencoder_factory.h
│ │ │ │ │ └── pvavcencoderinterface.h
│ │ │ │ └── src/
│ │ │ │ ├── NativeH264Encoder.cpp
│ │ │ │ ├── NativeH264Encoder.cpp__orig
│ │ │ │ ├── NativeH264Encoder.h
│ │ │ │ ├── avcenc_api.cpp
│ │ │ │ ├── avcenc_api.h
│ │ │ │ ├── avcenc_int.h
│ │ │ │ ├── avcenc_lib.h
│ │ │ │ ├── bitstream_io.cpp
│ │ │ │ ├── block.cpp
│ │ │ │ ├── findhalfpel.cpp
│ │ │ │ ├── header.cpp
│ │ │ │ ├── init.cpp
│ │ │ │ ├── intra_est.cpp
│ │ │ │ ├── motion_comp.cpp
│ │ │ │ ├── motion_est.cpp
│ │ │ │ ├── pvavcencoder.cpp
│ │ │ │ ├── pvavcencoder_factory.cpp
│ │ │ │ ├── rate_control.cpp
│ │ │ │ ├── residual.cpp
│ │ │ │ ├── sad.cpp
│ │ │ │ ├── sad_halfpel.cpp
│ │ │ │ ├── sad_halfpel_inline.h
│ │ │ │ ├── sad_inline.h
│ │ │ │ ├── sad_mb_offset.h
│ │ │ │ ├── slice.cpp
│ │ │ │ └── vlc_encode.cpp
│ │ │ └── oscl/
│ │ │ ├── oscl_base.h
│ │ │ ├── oscl_base_macros.h
│ │ │ ├── oscl_config.h
│ │ │ ├── oscl_dll.h
│ │ │ ├── oscl_error.h
│ │ │ ├── oscl_error_codes.h
│ │ │ ├── oscl_exception.h
│ │ │ ├── oscl_math.h
│ │ │ ├── oscl_mem.h
│ │ │ ├── oscl_string.h
│ │ │ ├── oscl_types.h
│ │ │ └── osclconfig_compiler_warnings.h
│ │ └── m4v_h263/
│ │ ├── Android.mk
│ │ ├── dec/
│ │ │ ├── Android.mk
│ │ │ ├── include/
│ │ │ │ ├── mp4dec_api.h
│ │ │ │ ├── pvm4vdecoder.h
│ │ │ │ ├── pvm4vdecoder_dpi.h
│ │ │ │ ├── pvm4vdecoder_factory.h
│ │ │ │ ├── pvvideodecoderinterface.h
│ │ │ │ └── visual_header.h
│ │ │ ├── oscl/
│ │ │ │ ├── oscl_base.h
│ │ │ │ ├── oscl_base_macros.h
│ │ │ │ ├── oscl_config.h
│ │ │ │ ├── oscl_dll.h
│ │ │ │ ├── oscl_error.h
│ │ │ │ ├── oscl_error_codes.h
│ │ │ │ ├── oscl_exception.h
│ │ │ │ ├── oscl_math.h
│ │ │ │ ├── oscl_mem.h
│ │ │ │ ├── oscl_types.h
│ │ │ │ └── osclconfig_compiler_warnings.h
│ │ │ └── src/
│ │ │ ├── 3GPVideoParser.cpp
│ │ │ ├── 3GPVideoParser.h
│ │ │ ├── adaptive_smooth_no_mmx.cpp
│ │ │ ├── bitstream.cpp
│ │ │ ├── bitstream.h
│ │ │ ├── block_idct.cpp
│ │ │ ├── cal_dc_scaler.cpp
│ │ │ ├── chv_filter.cpp
│ │ │ ├── chvr_filter.cpp
│ │ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_decoder_NativeH263Decoder.cpp
│ │ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_decoder_NativeH263Decoder.h
│ │ │ ├── combined_decode.cpp
│ │ │ ├── conceal.cpp
│ │ │ ├── datapart_decode.cpp
│ │ │ ├── dcac_prediction.cpp
│ │ │ ├── dec_pred_intra_dc.cpp
│ │ │ ├── deringing_chroma.cpp
│ │ │ ├── deringing_luma.cpp
│ │ │ ├── find_min_max.cpp
│ │ │ ├── get_pred_adv_b_add.cpp
│ │ │ ├── get_pred_outside.cpp
│ │ │ ├── idct.cpp
│ │ │ ├── idct.h
│ │ │ ├── idct_vca.cpp
│ │ │ ├── max_level.h
│ │ │ ├── mb_motion_comp.cpp
│ │ │ ├── mb_utils.cpp
│ │ │ ├── mbtype_mode.h
│ │ │ ├── motion_comp.h
│ │ │ ├── mp4dec_lib.h
│ │ │ ├── mp4def.h
│ │ │ ├── mp4lib_int.h
│ │ │ ├── packet_util.cpp
│ │ │ ├── post_filter.cpp
│ │ │ ├── post_proc.h
│ │ │ ├── post_proc_semaphore.cpp
│ │ │ ├── pp_semaphore_chroma_inter.cpp
│ │ │ ├── pp_semaphore_luma.cpp
│ │ │ ├── pvdec_api.cpp
│ │ │ ├── pvm4vdecoder.cpp
│ │ │ ├── pvm4vdecoder_factory.cpp
│ │ │ ├── scaling.h
│ │ │ ├── scaling_tab.cpp
│ │ │ ├── vlc_dec_tab.h
│ │ │ ├── vlc_decode.cpp
│ │ │ ├── vlc_decode.h
│ │ │ ├── vlc_dequant.cpp
│ │ │ ├── vlc_tab.cpp
│ │ │ ├── vop.cpp
│ │ │ ├── yuv2rgb.cpp
│ │ │ ├── yuv2rgb.h
│ │ │ ├── zigzag.h
│ │ │ └── zigzag_tab.cpp
│ │ └── enc/
│ │ ├── Android.mk
│ │ ├── include/
│ │ │ ├── cvei.h
│ │ │ ├── mp4enc_api.h
│ │ │ └── pvm4vencoder.h
│ │ ├── oscl/
│ │ │ ├── oscl_base.h
│ │ │ ├── oscl_base_macros.h
│ │ │ ├── oscl_config.h
│ │ │ ├── oscl_dll.h
│ │ │ ├── oscl_error.h
│ │ │ ├── oscl_error_codes.h
│ │ │ ├── oscl_exception.h
│ │ │ ├── oscl_math.h
│ │ │ ├── oscl_mem.h
│ │ │ ├── oscl_types.h
│ │ │ └── osclconfig_compiler_warnings.h
│ │ └── src/
│ │ ├── bitstream_io.cpp
│ │ ├── bitstream_io.h
│ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_encoder_NativeH263Encoder.cpp
│ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_encoder_NativeH263Encoder.h
│ │ ├── combined_encode.cpp
│ │ ├── datapart_encode.cpp
│ │ ├── dct.cpp
│ │ ├── dct.h
│ │ ├── dct_inline.h
│ │ ├── fastcodemb.cpp
│ │ ├── fastcodemb.h
│ │ ├── fastidct.cpp
│ │ ├── fastquant.cpp
│ │ ├── fastquant_inline.h
│ │ ├── findhalfpel.cpp
│ │ ├── m4venc_oscl.h
│ │ ├── me_utils.cpp
│ │ ├── motion_comp.cpp
│ │ ├── motion_est.cpp
│ │ ├── mp4def.h
│ │ ├── mp4enc_api.cpp
│ │ ├── mp4enc_api.cpp.original
│ │ ├── mp4enc_lib.h
│ │ ├── mp4lib_int.h
│ │ ├── pvm4vencoder.cpp
│ │ ├── rate_control.cpp
│ │ ├── rate_control.h
│ │ ├── sad.cpp
│ │ ├── sad_halfpel.cpp
│ │ ├── sad_halfpel_inline.h
│ │ ├── sad_inline.h
│ │ ├── sad_mb_offset.h
│ │ ├── vlc_enc_tab.h
│ │ ├── vlc_encode.cpp
│ │ ├── vlc_encode.h
│ │ ├── vlc_encode_inline.h
│ │ └── vop.cpp
│ ├── proguard.cfg
│ ├── project.properties
│ ├── res/
│ │ ├── layout/
│ │ │ ├── cameraapicodecs.xml
│ │ │ └── cameranativecodecs.xml
│ │ └── values/
│ │ └── strings.xml
│ └── src/
│ ├── com/
│ │ └── orangelabs/
│ │ └── rcs/
│ │ ├── core/
│ │ │ ├── CoreException.java
│ │ │ └── ims/
│ │ │ └── protocol/
│ │ │ └── rtp/
│ │ │ ├── CodecChain.java
│ │ │ ├── MediaRegistry.java
│ │ │ ├── MediaRtpReceiver.java
│ │ │ ├── Processor.java
│ │ │ ├── RtpException.java
│ │ │ ├── codec/
│ │ │ │ ├── Codec.java
│ │ │ │ └── video/
│ │ │ │ ├── VideoCodec.java
│ │ │ │ ├── h263/
│ │ │ │ │ ├── H263Config.java
│ │ │ │ │ ├── H263RtpHeader.java
│ │ │ │ │ ├── JavaDepacketizer.java
│ │ │ │ │ ├── JavaPacketizer.java
│ │ │ │ │ ├── decoder/
│ │ │ │ │ │ ├── NativeH263Decoder.java
│ │ │ │ │ │ └── VideoSample.java
│ │ │ │ │ └── encoder/
│ │ │ │ │ ├── NativeH263Encoder.java
│ │ │ │ │ └── NativeH263EncoderParams.java
│ │ │ │ └── h264/
│ │ │ │ ├── H264Config.java
│ │ │ │ ├── decoder/
│ │ │ │ │ └── NativeH264Decoder.java
│ │ │ │ └── encoder/
│ │ │ │ └── NativeH264Encoder.java
│ │ │ ├── core/
│ │ │ │ ├── RtcpAppPacket.java
│ │ │ │ ├── RtcpByePacket.java
│ │ │ │ ├── RtcpCompoundPacket.java
│ │ │ │ ├── RtcpPacket.java
│ │ │ │ ├── RtcpPacketReceiver.java
│ │ │ │ ├── RtcpPacketTransmitter.java
│ │ │ │ ├── RtcpPacketUtils.java
│ │ │ │ ├── RtcpReceiverReportPacket.java
│ │ │ │ ├── RtcpReport.java
│ │ │ │ ├── RtcpSdesBlock.java
│ │ │ │ ├── RtcpSdesItem.java
│ │ │ │ ├── RtcpSdesPacket.java
│ │ │ │ ├── RtcpSenderReportPacket.java
│ │ │ │ ├── RtcpSession.java
│ │ │ │ ├── RtcpStatisticsReceiver.java
│ │ │ │ ├── RtcpStatisticsTransmitter.java
│ │ │ │ ├── RtpPacket.java
│ │ │ │ ├── RtpPacketReceiver.java
│ │ │ │ ├── RtpPacketTransmitter.java
│ │ │ │ ├── RtpSource.java
│ │ │ │ ├── RtpStatisticsReceiver.java
│ │ │ │ └── RtpStatisticsTransmitter.java
│ │ │ ├── event/
│ │ │ │ ├── RtcpApplicationEvent.java
│ │ │ │ ├── RtcpByeEvent.java
│ │ │ │ ├── RtcpEvent.java
│ │ │ │ ├── RtcpEventListener.java
│ │ │ │ ├── RtcpReceiverReportEvent.java
│ │ │ │ ├── RtcpSdesEvent.java
│ │ │ │ └── RtcpSenderReportEvent.java
│ │ │ ├── format/
│ │ │ │ ├── DummyFormat.java
│ │ │ │ ├── Format.java
│ │ │ │ ├── audio/
│ │ │ │ │ ├── AudioFormat.java
│ │ │ │ │ └── PcmuAudioFormat.java
│ │ │ │ └── video/
│ │ │ │ ├── H263VideoFormat.java
│ │ │ │ ├── H264VideoFormat.java
│ │ │ │ └── VideoFormat.java
│ │ │ ├── media/
│ │ │ │ ├── MediaException.java
│ │ │ │ ├── MediaInput.java
│ │ │ │ ├── MediaOutput.java
│ │ │ │ └── MediaSample.java
│ │ │ ├── stream/
│ │ │ │ ├── DummyPacketSourceStream.java
│ │ │ │ ├── MediaCaptureStream.java
│ │ │ │ ├── MediaRendererStream.java
│ │ │ │ ├── ProcessorInputStream.java
│ │ │ │ ├── ProcessorOutputStream.java
│ │ │ │ └── RtpInputStream.java
│ │ │ └── util/
│ │ │ ├── Buffer.java
│ │ │ ├── Packet.java
│ │ │ └── SystemTimeBase.java
│ │ ├── platform/
│ │ │ ├── AndroidFactory.java
│ │ │ ├── FactoryException.java
│ │ │ ├── file/
│ │ │ │ ├── FileDescription.java
│ │ │ │ └── FileFactory.java
│ │ │ ├── logger/
│ │ │ │ └── AndroidAppender.java
│ │ │ ├── network/
│ │ │ │ ├── AndroidDatagramConnection.java
│ │ │ │ ├── AndroidHttpConnection.java
│ │ │ │ ├── AndroidNetworkFactory.java
│ │ │ │ ├── AndroidSocketConnection.java
│ │ │ │ ├── AndroidSocketServerConnection.java
│ │ │ │ ├── DatagramConnection.java
│ │ │ │ ├── HttpConnection.java
│ │ │ │ ├── NetworkFactory.java
│ │ │ │ ├── SocketConnection.java
│ │ │ │ └── SocketServerConnection.java
│ │ │ └── registry/
│ │ │ ├── AndroidRegistryFactory.java
│ │ │ └── RegistryFactory.java
│ │ ├── provider/
│ │ │ └── settings/
│ │ │ ├── RcsSettings.java
│ │ │ └── RcsSettingsData.java
│ │ ├── service/
│ │ │ └── api/
│ │ │ └── client/
│ │ │ ├── capability/
│ │ │ │ └── Capabilities.java
│ │ │ └── media/
│ │ │ ├── IMediaEventListener.aidl
│ │ │ ├── IMediaPlayer.aidl
│ │ │ ├── IMediaRenderer.aidl
│ │ │ ├── MediaCodec.aidl
│ │ │ ├── MediaCodec.java
│ │ │ └── video/
│ │ │ ├── VideoCodec.java
│ │ │ └── VideoSurfaceView.java
│ │ └── utils/
│ │ ├── FifoBuffer.java
│ │ ├── NetworkRessourceManager.java
│ │ └── logger/
│ │ ├── Appender.java
│ │ └── Logger.java
│ └── de/
│ └── kp/
│ ├── net/
│ │ ├── rtp/
│ │ │ ├── RtpPacket.java
│ │ │ ├── RtpRandom.java
│ │ │ ├── RtpSender.java
│ │ │ ├── RtpSocket.java
│ │ │ ├── packetizer/
│ │ │ │ ├── AbstractPacketizer.java
│ │ │ │ ├── H263Packetizer.java
│ │ │ │ ├── H264Fifo.java
│ │ │ │ └── H264Packetizer.java
│ │ │ ├── recorder/
│ │ │ │ ├── MediaRtpSender.java
│ │ │ │ └── RtspVideoRecorder.java
│ │ │ ├── stream/
│ │ │ │ └── RtpOutputStream.java
│ │ │ └── viewer/
│ │ │ └── RtpVideoRenderer.java
│ │ └── rtsp/
│ │ ├── RtspConstants.java
│ │ ├── client/
│ │ │ ├── RtspClient.java
│ │ │ ├── RtspControl.java
│ │ │ ├── api/
│ │ │ │ ├── EntityMessage.java
│ │ │ │ ├── Message.java
│ │ │ │ ├── MessageFactory.java
│ │ │ │ ├── Request.java
│ │ │ │ ├── RequestListener.java
│ │ │ │ ├── Response.java
│ │ │ │ ├── Transport.java
│ │ │ │ └── TransportListener.java
│ │ │ ├── header/
│ │ │ │ ├── CSeqHeader.java
│ │ │ │ ├── ContentEncodingHeader.java
│ │ │ │ ├── ContentLengthHeader.java
│ │ │ │ ├── ContentTypeHeader.java
│ │ │ │ ├── RtspBaseIntegerHeader.java
│ │ │ │ ├── RtspBaseStringHeader.java
│ │ │ │ ├── RtspContent.java
│ │ │ │ ├── RtspHeader.java
│ │ │ │ ├── SessionHeader.java
│ │ │ │ └── TransportHeader.java
│ │ │ ├── message/
│ │ │ │ ├── MessageBuffer.java
│ │ │ │ ├── RtspDescriptor.java
│ │ │ │ ├── RtspEntityMessage.java
│ │ │ │ ├── RtspMedia.java
│ │ │ │ ├── RtspMessage.java
│ │ │ │ └── RtspMessageFactory.java
│ │ │ ├── request/
│ │ │ │ ├── RtspDescribeRequest.java
│ │ │ │ ├── RtspOptionsRequest.java
│ │ │ │ ├── RtspPauseRequest.java
│ │ │ │ ├── RtspPlayRequest.java
│ │ │ │ ├── RtspRequest.java
│ │ │ │ ├── RtspSetupRequest.java
│ │ │ │ └── RtspTeardownRequest.java
│ │ │ ├── response/
│ │ │ │ └── RtspResponse.java
│ │ │ └── transport/
│ │ │ ├── TCPTransport.java
│ │ │ └── TCPTransportListener.java
│ │ └── server/
│ │ ├── RtspServer.java
│ │ └── response/
│ │ ├── Parser.java
│ │ ├── RtspAnnounceResponse.java
│ │ ├── RtspDescribeResponse.java
│ │ ├── RtspError.java
│ │ ├── RtspOptionsResponse.java
│ │ ├── RtspPauseResponse.java
│ │ ├── RtspPlayResponse.java
│ │ ├── RtspResponse.java
│ │ ├── RtspResponseTeardown.java
│ │ ├── RtspSetupResponse.java
│ │ └── SDP.java
│ └── rtspcamera/
│ ├── MediaConstants.java
│ ├── RtspApiCodecsCamera.java
│ └── RtspNativeCodecsCamera.java
└── RtspViewer/
├── .classpath
├── .gitignore
├── .project
├── AndroidManifest.xml
├── gpl.txt
├── proguard-project.txt
├── project.properties
├── res/
│ ├── layout/
│ │ └── videoview.xml
│ └── values/
│ └── strings.xml
└── src/
└── de/
└── kp/
└── rtspviewer/
└── RtspViewerActivity.java
================================================
FILE CONTENTS
================================================
================================================
FILE: README.md
================================================
# RTSP-Camera-for-Android
Android based RTSP Server which is able to serve live camera view to multiple RTSP clients, such as VLC.
This project is no longer maintained (in fact, since the end of 2012).
It exists to share the code how to implement this back in the days.
I have not tested the following GitHub project myself, but if you are looking for a more up-to-date Android RTSP solution, please check out:
* https://github.com/hypeapps/Endoscope
thanks for all the fish
(=PA=)
================================================
FILE: RtspCamera/.classpath
================================================
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>
================================================
FILE: RtspCamera/.gitignore
================================================
/bin
/gen
================================================
FILE: RtspCamera/.project
================================================
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>RtspCamera</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
<nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>
================================================
FILE: RtspCamera/.settings/org.jboss.ide.eclipse.as.core.prefs
================================================
eclipse.preferences.version=1
org.jboss.ide.eclipse.as.core.singledeployable.deployableList=
================================================
FILE: RtspCamera/AndroidManifest.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="de.kp.rtspcamera"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="5"
android:targetSdkVersion="6" />
<uses-feature android:name="android.hardware.camera" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<application
android:icon="@drawable/icon"
android:debuggable="true"
android:label="@string/app_name" >
<activity
android:name=".RtspNativeCodecsCamera"
android:label="@string/app_name"
android:screenOrientation="landscape" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
================================================
FILE: RtspCamera/gpl.txt
================================================
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
================================================
FILE: RtspCamera/jni/Android.mk
================================================
# Top-level JNI makefile: delegate the build to every subdirectory
# that provides its own Android.mk (avc_h264, h263 codec trees).
include $(call all-subdir-makefiles)
================================================
FILE: RtspCamera/jni/Application.mk
================================================
# NDK application settings for the RtspCamera project.
# Derive the project root from this file's own location (jni/..) instead of
# a hard-coded, developer-specific absolute path, so the build works from
# any checkout location. Older NDK releases require APP_PROJECT_PATH to be
# set explicitly when invoking ndk-build from outside the project directory.
APP_PROJECT_PATH := $(call my-dir)/..
# Native modules produced by this project (see the per-module Android.mk files).
APP_MODULES := libH264Decoder libH264Encoder libH263Encoder libH263Decoder
================================================
FILE: RtspCamera/jni/avc_h264/Android.mk
================================================
# Record the root of the AVC/H.264 codec tree so the common/, dec/ and enc/
# sub-makefiles can reference shared sources, then include those sub-makefiles.
AVC_ROOT:= $(call my-dir)
include $(call all-subdir-makefiles)
================================================
FILE: RtspCamera/jni/avc_h264/common/include/avcapi_common.h
================================================
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains common type definitions and enumerations used by AVC encoder
and decoder libraries which are exposed to the users.
@publishedAll
*/
#ifndef AVCAPI_COMMON_H_INCLUDED
#define AVCAPI_COMMON_H_INCLUDED
// xxx pa deact PV_MEMORY_POOL for test
#define PV_MEMORY_POOL
/**
This is common return status.
Negative values are error conditions; positive values are success conditions.
@publishedAll
*/
typedef enum
{
AVC_NO_BUFFER = -2,           /* no frame buffer available */
AVC_MEMORY_FAIL = -1,         /* memory allocation failed */
AVC_FAIL = 0,                 /* generic failure */
AVC_SUCCESS = 1,              /* operation succeeded */
AVC_PICTURE_OUTPUT_READY = 2  /* a decoded picture is ready for output */
} AVCStatus;
/**
This enumeration is for profiles. The value follows the profile_idc in sequence
parameter set rbsp, so each enumerator equals the profile_idc code transmitted
in the bitstream. See Annex A.
@publishedAll
*/
typedef enum
{
AVC_BASELINE = 66,
AVC_MAIN = 77,
AVC_EXTENDED = 88,
AVC_HIGH = 100,
AVC_HIGH10 = 110,
AVC_HIGH422 = 122,
AVC_HIGH444 = 144
} AVCProfile;
/**
This enumeration is for levels. The value follows the level_idc in sequence
parameter set rbsp (ten times the level number, e.g. level 3.1 -> 31;
AVC_LEVEL1_B is the special "1b" level). See Annex A.
@publishedAll
*/
typedef enum
{
AVC_LEVEL_AUTO = 0,
AVC_LEVEL1_B = 9,
AVC_LEVEL1 = 10,
AVC_LEVEL1_1 = 11,
AVC_LEVEL1_2 = 12,
AVC_LEVEL1_3 = 13,
AVC_LEVEL2 = 20,
AVC_LEVEL2_1 = 21,
AVC_LEVEL2_2 = 22,
AVC_LEVEL3 = 30,
AVC_LEVEL3_1 = 31,
AVC_LEVEL3_2 = 32,
AVC_LEVEL4 = 40,
AVC_LEVEL4_1 = 41,
AVC_LEVEL4_2 = 42,
AVC_LEVEL5 = 50,
AVC_LEVEL5_1 = 51
} AVCLevel;
/**
This enumeration follows Table 7-1 for NAL unit type codes.
nal_unit_type values 13 and above are not used by this library and
are therefore not listed here.
This may go to avccommon_api.h later (external common).
@publishedAll
*/
typedef enum
{
AVC_NALTYPE_SLICE = 1,  /* non-IDR non-data partition */
AVC_NALTYPE_DPA = 2,    /* data partition A */
AVC_NALTYPE_DPB = 3,    /* data partition B */
AVC_NALTYPE_DPC = 4,    /* data partition C */
AVC_NALTYPE_IDR = 5,    /* IDR NAL */
AVC_NALTYPE_SEI = 6,    /* supplemental enhancement info */
AVC_NALTYPE_SPS = 7,    /* sequence parameter set */
AVC_NALTYPE_PPS = 8,    /* picture parameter set */
AVC_NALTYPE_AUD = 9,    /* access unit delimiter */
AVC_NALTYPE_EOSEQ = 10, /* end of sequence */
AVC_NALTYPE_EOSTREAM = 11, /* end of stream */
AVC_NALTYPE_FILL = 12   /* filler data */
} AVCNalUnitType;
/**
This enumeration specifies debug logging type (severity of a message
passed to the FunctionType_DebugLog callback).
This may go to avccommon_api.h later (external common).
@publishedAll
*/
typedef enum
{
AVC_LOGTYPE_ERROR = 0,
AVC_LOGTYPE_WARNING = 1,
AVC_LOGTYPE_INFO = 2
} AVCLogType;
/**
This enumerates the status of certain flags (generic on/off switch).
@publishedAll
*/
typedef enum
{
AVC_OFF = 0,
AVC_ON = 1
} AVCFlag;
/**
This structure contains input information.
Note, this structure is identical to AVCDecOutput for now.
It describes one YCbCr 4:2:0 frame exchanged between the application
and the encoder/decoder libraries.
*/
typedef struct tagAVCFrameIO
{
/** A unique identification number for a particular instance of this structure.
To remain unchanged by the application between the time when it is given to the
library and the time when the library returns it back. */
uint32 id;
/** Array of pointers to Y,Cb,Cr content in 4:2:0 format. For AVC decoding,
this memory is allocated by the AVC decoder library. For AVC encoding, only the
memory for original unencoded frame is allocated by the application. Internal
memory is also allocated by the AVC encoder library. */
uint8 *YCbCr[3];
/** In/Out: Coded width of the luma component, it has to be multiple of 16. */
int pitch;
/** In/Out: Coded height of the luma component, must be multiple of 16. */
int height;
/** In/Out: Display width, less than pitch */
int clip_width;
/** In/Out: Display height, less than height */
int clip_height;
/** Input: Origin of the display area [0]=>row, [1]=>column */
int clip_origin[2];
/** Output: Frame number in de/encoding order (not necessary)*/
uint32 coding_order;
/** Output: Frame number in displaying order (this may or may not be associated with the POC at all!!!). */
uint32 disp_order;
/** In/Out: Flag for use for reference or not. */
uint is_reference;
/** In/Out: Coding timestamp in msec (not display timestamp) */
uint32 coding_timestamp;
/* there could be something else here such as format, DON (decoding order number)
if available thru SEI, etc. */
} AVCFrameIO;
/** CALLBACK FUNCTION TO BE IMPLEMENTED BY APPLICATION */
/** In AVCDecControls structure, userData is a pointer to an object with the following
member functions.
*/
/** @brief Decoded picture buffers (DPB) must be allocated or re-allocated before an
IDR frame is decoded. If PV_MEMORY_POOL is not defined, AVC lib will allocate DPB
internally which cannot be shared with the application. In that case, this function
will not be called.
@param userData The same value of userData in AVCHandle object.
@param frame_size_in_mbs The size of each frame in number of macroblocks.
@param num_buffers The number of frames in DPB.
@return 1 for success, 0 for fail (cannot allocate DPB)
*/
typedef int (*FunctionType_DPBAlloc)(void *userData, uint frame_size_in_mbs, uint num_buffers);
/** @brief AVC library calls this function to reserve a memory of one frame from the DPB.
Once reserved, this frame shall not be deleted or over-written by the app.
@param userData The same value of userData in AVCHandle object.
@param indx Index of a frame in DPB (AVC library keeps track of the index).
@param yuv The address of the yuv pointer returned to the AVC lib.
@return 1 for success, 0 for fail (no frames available to bind).
*/
typedef int (*FunctionType_FrameBind)(void *userData, int indx, uint8 **yuv);
/** @brief AVC library calls this function once a bound frame is not needed for decoding
operation (falls out of the sliding window, or marked unused for reference).
Note: the "FuctionType" spelling (missing 'n') is historical and kept as-is
because callers throughout the codebase reference this exact name.
The unnamed int parameter is the index of the frame to be unbound
(AVC library keeps track of the index).
@param userData The same value of userData in AVCHandle object.
@return none.
*/
typedef void (*FuctionType_FrameUnbind)(void *userData, int);
/** Pointer to malloc function for general memory allocation, so that application can keep track of
memory usage.
\param "size" "Size of requested memory in bytes."
\param "attribute" "Some value specifying types, priority, etc. of the memory."
\return "The address of the allocated memory casted to int"
*/
typedef int (*FunctionType_Malloc)(void *userData, int32 size, int attribute);
/** Function pointer to free
\param "mem" "Pointer to the memory to be freed casted to int"
\return "void"
*/
typedef void (*FunctionType_Free)(void *userData, int mem);
/** Debug logging information is returned to the application thru this function.
\param "type" "Type of logging message, see definition of AVCLogType."
\param "string1" "Logging message."
\param "string2" "To be defined."
*/
typedef void (*FunctionType_DebugLog)(uint32 *userData, AVCLogType type, char *string1, int val1, int val2);
/**
This structure has to be allocated and maintained by the user of the library.
This structure is used as a handle to the library object.
*/
typedef struct tagAVCHandle
{
/** A pointer to the internal data structure. Users have to make sure that this value
is NULL at the beginning.
*/
void *AVCObject;
/** A pointer to user object which has the following member functions used for
callback purpose. Passed back as the userData argument of every callback. */
void *userData;
/** Pointers to functions implemented by the users of AVC library */
FunctionType_DPBAlloc CBAVC_DPBAlloc;
FunctionType_FrameBind CBAVC_FrameBind;
FuctionType_FrameUnbind CBAVC_FrameUnbind;
FunctionType_Malloc CBAVC_Malloc;
FunctionType_Free CBAVC_Free;
FunctionType_DebugLog CBAVC_DebugLog;
/** Flag to enable debugging */
uint32 debugEnable;
} AVCHandle;
/* DEBUG_LOG forwards to the application's debug callback when PVDEBUGMSG_LOG
is defined at build time; otherwise it compiles away to nothing. */
#ifdef PVDEBUGMSG_LOG
#define DEBUG_LOG(a,b,c,d,e) CBAVC_DebugLog(a,b,c,d,e)
#else
#define DEBUG_LOG(a,b,c,d,e)
#endif
#endif /* AVCAPI_COMMON_H_INCLUDED */
================================================
FILE: RtspCamera/jni/avc_h264/common/include/avcint_common.h
================================================
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains common code shared between AVC decoder and AVC encoder for
internal use only.
@publishedAll
*/
#ifndef AVCINT_COMMON_H_INCLUDED
#define AVCINT_COMMON_H_INCLUDED
#ifndef OSCL_TYPES_H_INCLUDED
#include "oscl_types.h"
#endif
#ifndef AVCAPI_COMMON_H_INCLUDED
#include "avcapi_common.h"
#endif
#ifndef TRUE
#define TRUE 1
#define FALSE 0
#endif
/**
Mathematic functions defined in subclause 5.7.
Can be replaced with assembly instructions for speedup.
NOTE: these are macros, so arguments may be evaluated more than once;
do not pass expressions with side effects (e.g. AVC_ABS(x++)).
@publishedAll
*/
#define AVC_ABS(x) (((x)<0)? -(x) : (x))
#define AVC_SIGN(x) (((x)<0)? -1 : 1)
#define AVC_SIGN0(x) (((x)<0)? -1 : (((x)>0) ? 1 : 0))
#define AVC_MAX(x,y) ((x)>(y)? (x):(y))
#define AVC_MIN(x,y) ((x)<(y)? (x):(y))
#define AVC_MEDIAN(A,B,C) ((A) > (B) ? ((A) < (C) ? (A) : (B) > (C) ? (B) : (C)): (B) < (C) ? (B) : (C) > (A) ? (C) : (A))
#define AVC_CLIP3(a,b,x) (AVC_MAX(a,AVC_MIN(x,b))) /* clip x between a and b */
#define AVC_CLIP(x) AVC_CLIP3(0,255,x) /* clip to 8-bit sample range */
#define AVC_FLOOR(x) ((int)(x))
#define AVC_RASTER_SCAN(x,y,n) ((x)+(y)*(n))
#define AVC_ROUND(x) (AVC_SIGN(x)*AVC_FLOOR(AVC_ABS(x)+0.5))
#define AVC_INVERSE_RASTER_SCAN(a,b,c,d,e) (((e)==0)? (((a)%((d)/(b)))*(b)): (((a)/((d)/(b)))*(c)))
/* a:block address, b:block width, c:block height, d:total_width, e:x or y coordinate */
#define DEFAULT_ATTR 0 /* default memory attribute */
#define FAST_MEM_ATTR 1 /* fast memory attribute */
/* This section is for definition of constants. */
#define MB_SIZE 16    /* macroblock width/height in luma pixels */
#define BLOCK_SIZE 4  /* 4x4 transform block size */
#define EMULATION_PREVENTION_THREE_BYTE 0x3 /* byte inserted to avoid start-code emulation */
#define NUM_PIXELS_IN_MB (24*16) /* 16x16 luma + two 8x8 chroma = 384 pixels */
#define NUM_BLKS_IN_MB 24 /* 16 luma + 8 chroma 4x4 blocks per macroblock */
#define AVCNumI4PredMode 9
#define AVCNumI16PredMode 4
#define AVCNumIChromaMode 4
/* constants used in the structures below */
#define MAXIMUMVALUEOFcpb_cnt 32 /* used in HRDParams */
#define MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE 255 /* used in SeqParamSet */
#define MAX_NUM_SLICE_GROUP 8 /* used in PicParamSet */
#define MAX_REF_PIC_LIST_REORDERING 32 /* 32 is maximum according to Annex A, SliceHeader */
#define MAX_DEC_REF_PIC_MARKING 64 /* 64 is the maximum possible given the max num ref pictures to 31. */
#define MAX_FS (16+1) /* pre-defined size of frame store array */
#define MAX_LEVEL_IDX 15 /* only 15 levels defined for now */
#define MAX_REF_PIC_LIST 33 /* max size of the RefPicList0 and RefPicList1 */
/**
Architectural related macros.
@publishedAll
*/
#ifdef USE_PRED_BLOCK
#define MB_BASED_DEBLOCK
#endif
/**
Picture type, PV created. Only frame (progressive) coding is represented;
field picture types are not defined here.
@publishedAll
*/
typedef enum
{
AVC_FRAME = 3
} AVCPictureType;
/**
This slice type follows Table 7-3. The bottom 5 items may not needed.
@publishedAll
*/
typedef enum
{
AVC_P_SLICE = 0,
AVC_B_SLICE = 1,
AVC_I_SLICE = 2,
AVC_SP_SLICE = 3,
AVC_SI_SLICE = 4,
AVC_P_ALL_SLICE = 5,
AVC_B_ALL_SLICE = 6,
AVC_I_ALL_SLICE = 7,
AVC_SP_ALL_SLICE = 8,
AVC_SI_ALL_SLICE = 9
} AVCSliceType;
/**
Types of the macroblock and partition. PV Created.
@publishedAll
*/
typedef enum
{
/* intra */
AVC_I4, /* Intra_4x4 */
AVC_I16, /* Intra_16x16 */
AVC_I_PCM, /* raw PCM macroblock */
AVC_SI4, /* SI-slice intra 4x4 */
/* inter for both P and B*/
AVC_BDirect16, /* B direct 16x16 */
AVC_P16, /* one 16x16 partition */
AVC_P16x8, /* two 16x8 partitions */
AVC_P8x16, /* two 8x16 partitions */
AVC_P8, /* four 8x8 partitions (sub-partitioned per subMbMode) */
AVC_P8ref0, /* P_8x8ref0: all partitions use reference index 0 */
AVC_SKIP /* skipped macroblock */
} AVCMBMode;
/**
Enumeration for sub-macroblock mode, interpreted from sub_mb_type.
Applies to each 8x8 partition when the MB mode is AVC_P8/AVC_P8ref0.
@publishedAll
*/
typedef enum
{
/* for sub-partition mode */
AVC_BDirect8, /* B direct 8x8 */
AVC_8x8,
AVC_8x4,
AVC_4x8,
AVC_4x4
} AVCSubMBMode;
/**
Mode of prediction of partition or sub-partition. PV Created.
Do not change the order!!! Used in table look-up mode prediction in
vlc.c.
@publishedAll
*/
typedef enum
{
AVC_Pred_L0 = 0, /* prediction from reference list 0 only */
AVC_Pred_L1, /* prediction from reference list 1 only */
AVC_BiPred, /* bi-directional prediction (both lists) */
AVC_Direct /* direct mode (B slices) */
} AVCPredMode;
/**
Mode of intra 4x4 prediction. Table 8-2
(enumerator values equal the Intra4x4PredMode codes in the standard)
@publishedAll
*/
typedef enum
{
AVC_I4_Vertical = 0,
AVC_I4_Horizontal,
AVC_I4_DC,
AVC_I4_Diagonal_Down_Left,
AVC_I4_Diagonal_Down_Right,
AVC_I4_Vertical_Right,
AVC_I4_Horizontal_Down,
AVC_I4_Vertical_Left,
AVC_I4_Horizontal_Up
} AVCIntra4x4PredMode;
/**
Mode of intra 16x16 prediction. Table 8-3
@publishedAll
*/
typedef enum
{
AVC_I16_Vertical = 0,
AVC_I16_Horizontal,
AVC_I16_DC,
AVC_I16_Plane
} AVCIntra16x16PredMode;
/**
Mode of intra chroma prediction. Table 8-4
(note: unlike 16x16 luma, DC comes first here)
@publishedAll
*/
typedef enum
{
AVC_IC_DC = 0,
AVC_IC_Horizontal,
AVC_IC_Vertical,
AVC_IC_Plane
} AVCIntraChromaPredMode;
/**
Type of residual going to residual_block_cavlc function, PV created.
Selects the coefficient-count/scan handling for CAVLC parsing.
@publishedAll
*/
typedef enum
{
AVC_Luma, /* 4x4 luma block, all 16 coefficients */
AVC_Intra16DC, /* DC coefficients of an Intra_16x16 MB */
AVC_Intra16AC, /* AC coefficients (15 per block) of an Intra_16x16 MB */
AVC_ChromaDC, /* 2x2 chroma DC block */
AVC_ChromaAC /* chroma AC coefficients */
} AVCResidualType;
/**
This structure contains HRD (hypothetical reference decoder) parameters,
i.e. the hrd_parameters() syntax embedded in the VUI, as specified in Annex E.
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagHRDParams
{
uint cpb_cnt_minus1; /* ue(v), range 0..31 */
uint bit_rate_scale; /* u(4) */
uint cpb_size_scale; /* u(4) */
uint32 bit_rate_value_minus1[MAXIMUMVALUEOFcpb_cnt];/* ue(v), range 0..2^32-2 */
uint32 cpb_size_value_minus1[MAXIMUMVALUEOFcpb_cnt]; /* ue(v), range 0..2^32-2 */
uint cbr_flag[MAXIMUMVALUEOFcpb_cnt]; /* u(1) */
uint initial_cpb_removal_delay_length_minus1; /* u(5), default 23 */
uint cpb_removal_delay_length_minus1; /* u(5), default 23 */
uint dpb_output_delay_length_minus1; /* u(5), default 23 */
uint time_offset_length; /* u(5), default 24 */
} AVCHRDParams;
/**
This structure contains VUI parameters as specified in Annex E.
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagVUIParam
{
uint aspect_ratio_info_present_flag; /* u(1) */
uint aspect_ratio_idc; /* u(8), table E-1 */
uint sar_width; /* u(16) */
uint sar_height; /* u(16) */
uint overscan_info_present_flag; /* u(1) */
uint overscan_appropriate_flag; /* u(1) */
uint video_signal_type_present_flag; /* u(1) */
uint video_format; /* u(3), Table E-2, default 5, unspecified */
uint video_full_range_flag; /* u(1) */
uint colour_description_present_flag; /* u(1) */
uint colour_primaries; /* u(8), Table E-3, default 2, unspecified */
uint transfer_characteristics; /* u(8), Table E-4, default 2, unspecified */
uint matrix_coefficients; /* u(8), Table E-5, default 2, unspecified */
uint chroma_location_info_present_flag; /* u(1) */
uint chroma_sample_loc_type_top_field; /* ue(v), Fig. E-1, range 0..5, default 0 */
uint chroma_sample_loc_type_bottom_field; /* ue(v) */
uint timing_info_present_flag; /* u(1) */
uint num_units_in_tick; /* u(32), must be > 0 */
uint time_scale; /* u(32), must be > 0 */
uint fixed_frame_rate_flag; /* u(1), Eq. C-13 */
uint nal_hrd_parameters_present_flag; /* u(1) */
AVCHRDParams nal_hrd_parameters; /* hrd_parameters */
uint vcl_hrd_parameters_present_flag; /* u(1) */
AVCHRDParams vcl_hrd_parameters; /* hrd_parameters */
/* if ((nal_hrd_parameters_present_flag || (vcl_hrd_parameters_present_flag)) */
uint low_delay_hrd_flag; /* u(1) */
uint pic_struct_present_flag; /* u(1) */
uint bitstream_restriction_flag; /* u(1) */
uint motion_vectors_over_pic_boundaries_flag; /* u(1) */
uint max_bytes_per_pic_denom; /* ue(v), default 2 */
uint max_bits_per_mb_denom; /* ue(v), range 0..16, default 1 */
uint log2_max_mv_length_vertical; /* ue(v), range 0..16, default 16 */
uint log2_max_mv_length_horizontal; /* ue(v), range 0..16, default 16 */
uint max_dec_frame_reordering; /* ue(v) */
uint max_dec_frame_buffering; /* ue(v) */
} AVCVUIParams;
/**
This structure contains information in a sequence parameter set NAL (subclause 7.3.2.1).
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagSeqParamSet
{
uint Valid; /* indicates the parameter set is valid */
uint profile_idc; /* u(8) */
uint constrained_set0_flag; /* u(1) */
uint constrained_set1_flag; /* u(1) */
uint constrained_set2_flag; /* u(1) */
uint constrained_set3_flag; /* u(1) */
uint level_idc; /* u(8) */
uint seq_parameter_set_id; /* ue(v), range 0..31 */
uint log2_max_frame_num_minus4; /* ue(v), range 0..12 */
uint pic_order_cnt_type; /* ue(v), range 0..2 */
/* if( pic_order_cnt_type == 0 ) */
uint log2_max_pic_order_cnt_lsb_minus4; /* ue(v), range 0..12 */
/* else if( pic_order_cnt_type == 1 ) */
uint delta_pic_order_always_zero_flag; /* u(1) */
int32 offset_for_non_ref_pic; /* se(v) */
int32 offset_for_top_to_bottom_field; /* se(v) */
uint num_ref_frames_in_pic_order_cnt_cycle; /* ue(v) , range 0..255 */
/* for( i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++ ) */
int32 offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE]; /* se(v) */
uint num_ref_frames; /* ue(v), range 0..16 */
uint gaps_in_frame_num_value_allowed_flag; /* u(1) */
uint pic_width_in_mbs_minus1; /* ue(v) */
uint pic_height_in_map_units_minus1; /* ue(v) */
uint frame_mbs_only_flag; /* u(1) */
/* if( !frame_mbs_only_flag ) */
uint mb_adaptive_frame_field_flag; /* u(1) */
uint direct_8x8_inference_flag; /* u(1), must be 1 when frame_mbs_only_flag is 0 */
uint frame_cropping_flag; /* u(1) */
/* if( frame_cropping_flag) */
uint frame_crop_left_offset; /* ue(v) */
uint frame_crop_right_offset; /* ue(v) */
uint frame_crop_top_offset; /* ue(v) */
uint frame_crop_bottom_offset; /* ue(v) */
uint vui_parameters_present_flag; /* u(1) */
// uint nal_hrd_parameters_present_flag;
// uint vcl_hrd_parameters_present_flag;
// AVCHRDParams *nal_hrd_parameters;
// AVCHRDParams *vcl_hrd_parameters;
AVCVUIParams vui_parameters; /* AVCVUIParam */
} AVCSeqParamSet;
/**
This structure contains information in a picture parameter set NAL (subclause 7.3.2.2).
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagPicParamSet
{
uint pic_parameter_set_id; /* ue(v), range 0..255 */
uint seq_parameter_set_id; /* ue(v), range 0..31 */
uint entropy_coding_mode_flag; /* u(1), 0=CAVLC, 1=CABAC */
uint pic_order_present_flag; /* u(1) */
uint num_slice_groups_minus1; /* ue(v), range in Annex A */
/* if( num_slice_groups_minus1 > 0) */
uint slice_group_map_type; /* ue(v), range 0..6 */
/* if( slice_group_map_type = = 0 ) */
/* for(0:1:num_slice_groups_minus1) */
uint run_length_minus1[MAX_NUM_SLICE_GROUP]; /* ue(v) */
/* else if( slice_group_map_type = = 2 ) */
/* for(0:1:num_slice_groups_minus1-1) */
uint top_left[MAX_NUM_SLICE_GROUP-1]; /* ue(v) */
uint bottom_right[MAX_NUM_SLICE_GROUP-1]; /* ue(v) */
/* else if( slice_group_map_type = = 3 || 4 || 5 */
uint slice_group_change_direction_flag; /* u(1) */
uint slice_group_change_rate_minus1; /* ue(v) */
/* else if( slice_group_map_type = = 6 ) */
uint pic_size_in_map_units_minus1; /* ue(v) */
/* for(0:1:pic_size_in_map_units_minus1) */
uint *slice_group_id; /* complete MBAmap u(v); heap-allocated, size presumably
pic_size_in_map_units_minus1+1 — see where the PPS is parsed */
uint num_ref_idx_l0_active_minus1; /* ue(v), range 0..31 */
uint num_ref_idx_l1_active_minus1; /* ue(v), range 0..31 */
uint weighted_pred_flag; /* u(1) */
uint weighted_bipred_idc; /* u(2), range 0..2 */
int pic_init_qp_minus26; /* se(v), range -26..25 */
int pic_init_qs_minus26; /* se(v), range -26..25 */
int chroma_qp_index_offset; /* se(v), range -12..12 */
uint deblocking_filter_control_present_flag; /* u(1) */
uint constrained_intra_pred_flag; /* u(1) */
uint redundant_pic_cnt_present_flag; /* u(1) */
} AVCPicParamSet;
/**
This structure contains slice header information (subclause 7.3.3).
Some variables may be removed from the structure if they are found to be useless to store.
@publishedAll
*/
typedef struct tagSliceHeader
{
uint first_mb_in_slice; /* ue(v) */
AVCSliceType slice_type; /* ue(v), Table 7-3, range 0..9 */
uint pic_parameter_set_id; /* ue(v), range 0..255 */
uint frame_num; /* u(v), see log2max_frame_num_minus4 */
/* if( !frame_mbs_only_flag) */
uint field_pic_flag; /* u(1) */
/* if(field_pic_flag) */
uint bottom_field_flag; /* u(1) */
/* if(nal_unit_type == 5) */
uint idr_pic_id; /* ue(v), range 0..65535 */
/* if(pic_order_cnt_type==0) */
uint pic_order_cnt_lsb; /* u(v), range 0..MaxPicOrderCntLsb-1 */
/* if(pic_order_present_flag && !field_pic_flag) */
int32 delta_pic_order_cnt_bottom; /* se(v) */
/* if(pic_order_cnt_type==1 && !delta_pic_order_always_zero_flag) */
/* if(pic_order_present_flag && !field_pic_flag) */
int32 delta_pic_order_cnt[2];
/* if(redundant_pic_cnt_present_flag) */
uint redundant_pic_cnt; /* ue(v), range 0..127 */
/* if(slice_type == B) */
uint direct_spatial_mv_pred_flag; /* u(1) */
/* if(slice_type == P || slice_type==SP || slice_type==B) */
uint num_ref_idx_active_override_flag; /* u(1) */
/* if(num_ref_idx_active_override_flag) */
uint num_ref_idx_l0_active_minus1; /* ue(v) */
/* if(slice_type == B) */
uint num_ref_idx_l1_active_minus1; /* ue(v) */
/* ref_pic_list_reordering() */
uint ref_pic_list_reordering_flag_l0; /* u(1) */
uint reordering_of_pic_nums_idc_l0[MAX_REF_PIC_LIST_REORDERING]; /* ue(v), range 0..3 */
uint abs_diff_pic_num_minus1_l0[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
uint long_term_pic_num_l0[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
uint ref_pic_list_reordering_flag_l1; /* u(1) */
uint reordering_of_pic_nums_idc_l1[MAX_REF_PIC_LIST_REORDERING]; /* ue(v), range 0..3 */
uint abs_diff_pic_num_minus1_l1[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
uint long_term_pic_num_l1[MAX_REF_PIC_LIST_REORDERING]; /* ue(v) */
/* end ref_pic_list_reordering() */
/* if(nal_ref_idc!=0) */
/* dec_ref_pic_marking() */
uint no_output_of_prior_pics_flag; /* u(1) */
uint long_term_reference_flag; /* u(1) */
uint adaptive_ref_pic_marking_mode_flag; /* u(1) */
uint memory_management_control_operation[MAX_DEC_REF_PIC_MARKING]; /* ue(v), range 0..6 */
uint difference_of_pic_nums_minus1[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
uint long_term_pic_num[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
uint long_term_frame_idx[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
uint max_long_term_frame_idx_plus1[MAX_DEC_REF_PIC_MARKING]; /* ue(v) */
/* end dec_ref_pic_marking() */
/* if(entropy_coding_mode_flag && slice_type!=I && slice_type!=SI) */
uint cabac_init_idc; /* ue(v), range 0..2 */
int slice_qp_delta; /* se(v); resulting SliceQPy must lie in 0..51 */
/* if(slice_type==SP || slice_type==SI) */
/* if(slice_type==SP) */
uint sp_for_switch_flag; /* u(1) */
int slice_qs_delta; /* se(v) */
/* if(deblocking_filter_control_present_flag)*/
uint disable_deblocking_filter_idc; /* ue(v), range 0..2 */
/* if(disable_deblocking_filter_idc!=1) */
int slice_alpha_c0_offset_div2; /* se(v), range -6..6, default 0 */
int slice_beta_offset_div_2; /* se(v), range -6..6, default 0 */
/* if(num_slice_groups_minus1>0 && slice_group_map_type>=3 && slice_group_map_type<=5)*/
uint slice_group_change_cycle; /* u(v), use ceil(log2(PicSizeInMapUnits/SliceGroupChangeRate + 1)) bits*/
} AVCSliceHeader;
/**
This struct contains information about the neighboring pixel.
@publishedAll
*/
typedef struct tagPixPos
{
int available; /* nonzero if the neighboring position exists/is usable */
int mb_addr; /* macroblock address of the current pixel, see below */
int x; /* x,y positions of current pixel relative to the macroblock mb_addr */
int y;
int pos_x; /* x,y positions of current pixel relative to the picture. */
int pos_y;
} AVCPixelPos;
/* Availability flags for the neighboring blocks of the current block
   (the original comments here were copy-paste leftovers from tagPixPos). */
typedef struct tagNeighborAvailability
{
int left; /* nonzero if the left neighbor is available */
int top; /* nonzero if the top neighbor is available */
int top_right; /* nonzero if the top-right neighbor is available */
} AVCNeighborAvailability;
/**
This structure contains picture data and related information necessary to be used as
reference frame.
@publishedAll
*/
typedef struct tagPictureData
{
uint16 RefIdx; /* index used for reference frame */
uint8 *Sl; /* luma plane; derived from base_dpb in AVCFrameStore */
uint8 *Scb; /* chroma Cb plane */
uint8 *Scr; /* chroma Cr plane */
/* For complementary fields, YUV are interlaced: Sl of top_field and
bottom_fields will be one line apart and the
stride will be 2 times the width. */
/* For non-complementary field, the above still applies. A special
output formatting is required. */
/* Then, necessary variables that need to be stored */
AVCPictureType picType; /* frame, top-field or bot-field */
/*bool*/
uint isReference; /* currently used for reference */
/*bool*/
uint isLongTerm; /* marked as long-term reference */
int PicOrderCnt;
int PicNum;
int LongTermPicNum;
int width; /* how many pixel per line */
int height;/* how many line */
int pitch; /* how many pixel between the line */
uint padded; /* flag for being padded */
} AVCPictureData;
/**
This structure contains information for frame storage.
@publishedAll
*/
typedef struct tagFrameStore
{
uint8 *base_dpb; /* base pointer for the YCbCr */
int IsReference; /* 0=not used for ref; 1=top used; 2=bottom used; 3=both fields (or frame) used */
int IsLongTerm; /* 0=not used for ref; 1=top used; 2=bottom used; 3=both fields (or frame) used */
/* if IsLongTerm is true, IsReference can be ignored. */
/* if IsReference is true, IsLongterm will be checked for short-term or long-term. */
/* IsUsed must be true to enable the validity of IsReference and IsLongTerm */
int IsOutputted; /* has it been outputted via AVCDecGetOutput API, then don't output it again,
wait until it is returned. */
AVCPictureData frame; /* the picture stored in this slot */
int FrameNum;
int FrameNumWrap; /* FrameNum wrapped per subclause 8.2.4.1 */
int LongTermFrameIdx;
int PicOrderCnt; /* of the frame, smaller of the 2 fields */
} AVCFrameStore;
/**
This structure maintains the actual memory for the decoded picture buffer (DPB) which is
allocated at the beginning according to profile/level.
Once decoded_picture_buffer is allocated, Sl,Scb,Scr in
AVCPictureData structure just point to the address in decoded_picture_buffer.
used_size maintains the used space.
NOTE:: In order to maintain contiguous memory space, memory equal to a single frame is
assigned at a time. Two opposite fields reside in the same frame memory.
|-------|---|---|---|xxx|-------|xxx|---|-------| decoded_picture_buffer
frame top bot top frame bot frame
0 1 1 2 3 4 5
bot 2 and top 4 do not exist, the memory is not used.
@publishedAll
*/
typedef struct tagDecPicBuffer
{
uint8 *decoded_picture_buffer; /* actual memory */
uint32 dpb_size; /* size of dpb in bytes */
uint32 used_size; /* used size */
struct tagFrameStore *fs[MAX_FS]; /* list of frame stored, actual buffer */
int num_fs; /* size of fs */
} AVCDecPicBuffer;
/**
This structure contains macroblock related variables.
@publishedAll
*/
typedef struct tagMacroblock
{
AVCIntraChromaPredMode intra_chroma_pred_mode; /* ue(v) */
int32 mvL0[16]; /* motion vectors, 16 bit packed (x,y) per element, one per 4x4 block */
int32 mvL1[16];
int16 ref_idx_L0[4]; /* one per 8x8 partition */
int16 ref_idx_L1[4];
uint16 RefIdx[4]; /* ref index, has value of AVCPictureData->RefIdx */
/* stored data */
/*bool*/
uint mb_intra; /* intra flag */
/*bool*/
uint mb_bottom_field;
AVCMBMode mbMode; /* type of MB prediction */
AVCSubMBMode subMbMode[4]; /* for each 8x8 partition */
uint CBP; /* CodeBlockPattern */
AVCIntra16x16PredMode i16Mode; /* Intra16x16PredMode */
AVCIntra4x4PredMode i4Mode[16]; /* Intra4x4PredMode, in raster scan order */
int NumMbPart; /* number of partition */
AVCPredMode MBPartPredMode[4][4]; /* prediction mode [MBPartIndx][subMBPartIndx] */
int MbPartWidth;
int MbPartHeight;
int NumSubMbPart[4]; /* for each 8x8 partition */
int SubMbPartWidth[4]; /* for each 8x8 partition */
int SubMbPartHeight[4]; /* for each 8x8 partition */
uint8 nz_coeff[NUM_BLKS_IN_MB]; /* [blk_y][blk_x], Chroma is [4..5][0...3], see predict_nnz() function */
int QPy; /* Luma QP */
int QPc; /* Chroma QP */
int QSc; /* Chroma QP S-picture */
int slice_id; // MC slice
} AVCMacroblock;
/**
This structure contains common internal variables between the encoder and decoder
such that some functions can be shared among them.
@publishedAll
*/
typedef struct tagCommonObj
{
/* put these 2 up here to make sure they are word-aligned */
int16 block[NUM_PIXELS_IN_MB]; /* for transformed residue coefficient */
uint8 *pred_block; /* pointer to prediction block, could point to a frame */
#ifdef USE_PRED_BLOCK
uint8 pred[688]; /* for prediction */
/* Luma [0-399], Cb [400-543], Cr[544-687] */
#endif
int pred_pitch; /* either equal to 20 or to frame pitch */
/* temporary buffers for intra prediction */
/* these variables should remain inside fast RAM */
#ifdef MB_BASED_DEBLOCK
uint8 *intra_pred_top; /* a row of pixel for intra prediction */
uint8 intra_pred_left[17]; /* a column of pixel for intra prediction */
uint8 *intra_pred_top_cb;
uint8 intra_pred_left_cb[9];
uint8 *intra_pred_top_cr;
uint8 intra_pred_left_cr[9];
#endif
/* pointer to the prediction area for intra prediction */
uint8 *pintra_pred_top; /* pointer to the top intra prediction value */
uint8 *pintra_pred_left; /* pointer to the left intra prediction value */
uint8 intra_pred_topleft; /* the [-1,-1] neighboring pixel */
uint8 *pintra_pred_top_cb;
uint8 *pintra_pred_left_cb;
uint8 intra_pred_topleft_cb;
uint8 *pintra_pred_top_cr;
uint8 *pintra_pred_left_cr;
uint8 intra_pred_topleft_cr;
/* current quantization parameters and their cached div/mod-6 values */
int QPy;
int QPc;
int QPy_div_6;
int QPy_mod_6;
int QPc_div_6;
int QPc_mod_6;
/**** nal_unit ******/
/* previously in AVCNALUnit format */
uint NumBytesInRBSP;
int forbidden_bit;
int nal_ref_idc;
AVCNalUnitType nal_unit_type;
AVCNalUnitType prev_nal_unit_type;
/*bool*/
uint slice_data_partitioning; /* flag when nal_unit_type is between 2 and 4 */
/**** ******** ******/
AVCSliceType slice_type;
AVCDecPicBuffer *decPicBuf; /* decoded picture buffer */
AVCSeqParamSet *currSeqParams; /* the currently used one */
AVCPicParamSet *currPicParams; /* the currently used one */
uint seq_parameter_set_id;
/* slice header */
AVCSliceHeader *sliceHdr; /* slice header param syntax variables */
AVCPictureData *currPic; /* pointer to current picture */
AVCFrameStore *currFS; /* pointer to current frame store */
AVCPictureType currPicType; /* frame, top-field or bot-field */
/*bool*/
uint newPic; /* flag for new picture */
uint newSlice; /* flag for new slice */
AVCPictureData *prevRefPic; /* pointer to previous picture */
AVCMacroblock *mblock; /* array of macroblocks covering entire picture */
AVCMacroblock *currMB; /* pointer to current macroblock */
uint mbNum; /* number of current MB */
int mb_x; /* x-coordinate of the current mbNum */
int mb_y; /* y-coordinate of the current mbNum */
/* For internal operation, scratch memory for MV, prediction, transform, etc.*/
uint32 cbp4x4; /* each bit represent nonzero 4x4 block in reverse raster scan order */
/* starting from luma, Cb and Cr, lsb toward msb */
int mvd_l0[4][4][2]; /* [mbPartIdx][subMbPartIdx][compIdx], se(v) */
int mvd_l1[4][4][2]; /* [mbPartIdx][subMbPartIdx][compIdx], se(v) */
int mbAddrA, mbAddrB, mbAddrC, mbAddrD; /* address of neighboring MBs */
/*bool*/
uint mbAvailA, mbAvailB, mbAvailC, mbAvailD; /* availability */
/*bool*/
uint intraAvailA, intraAvailB, intraAvailC, intraAvailD; /* for intra mode */
/***********************************************/
/* The following variables are defined in the draft. */
/* They may need to be stored in PictureData structure and used for reference. */
/* In that case, just move or copy it to AVCDecPictureData structure. */
int padded_size; /* size of extra padding to a frame */
uint MaxFrameNum; /*2^(log2_max_frame_num_minus4+4), range 0.. 2^16-1 */
uint MaxPicOrderCntLsb; /*2^(log2_max_pic_order_cnt_lsb_minus4+4), 0..2^16-1 */
uint PicWidthInMbs; /*pic_width_in_mbs_minus1+1 */
uint PicWidthInSamplesL; /* PicWidthInMbs*16 */
uint PicWidthInSamplesC; /* PicWidthInMbs*8 */
uint PicHeightInMapUnits; /* pic_height_in_map_units_minus1+1 */
uint PicSizeInMapUnits; /* PicWidthInMbs*PicHeightInMapUnits */
uint FrameHeightInMbs; /*(2-frame_mbs_only_flag)*PicHeightInMapUnits */
uint SliceGroupChangeRate; /* slice_group_change_rate_minus1 + 1 */
/* access unit */
uint primary_pic_type; /* u(3), Table 7-2, kinda informative only */
/* slice data partition */
uint slice_id; /* ue(v) */
uint UnusedShortTermFrameNum;
uint PrevRefFrameNum;
uint MbaffFrameFlag; /* (mb_adaptive_frame_field_flag && !field_pic_flag) */
uint PicHeightInMbs; /* FrameHeightInMbs/(1+field_pic_flag) */
int PicHeightInSamplesL; /* PicHeightInMbs*16 */
int PicHeightInSamplesC; /* PicHeightInMbs*8 */
uint PicSizeInMbs; /* PicWidthInMbs*PicHeightInMbs */
uint level_idc;
int numMBs;
uint MaxPicNum;
uint CurrPicNum;
int QSy; /* 26+pic_init_qp_minus26+slice_qs_delta */
int FilterOffsetA;
int FilterOffsetB;
uint MapUnitsInSliceGroup0; /* Min(slice_group_change_cycle*SliceGroupChangeRate,PicSizeInMapUnits) */
/* dec_ref_pic_marking */
int MaxLongTermFrameIdx;
int LongTermFrameIdx;
/* POC related variables */
/*bool*/
uint mem_mgr_ctrl_eq_5; /* if memory_management_control_operation equal to 5 flag */
int PicOrderCnt;
int BottomFieldOrderCnt, TopFieldOrderCnt;
/* POC mode 0 */
int prevPicOrderCntMsb;
uint prevPicOrderCntLsb;
int PicOrderCntMsb;
/* POC mode 1 */
int prevFrameNumOffset, FrameNumOffset;
uint prevFrameNum;
int absFrameNum;
int picOrderCntCycleCnt, frameNumInPicOrderCntCycle;
int expectedDeltaPerPicOrderCntCycle;
int expectedPicOrderCnt;
/* FMO */
int *MbToSliceGroupMap; /* to be re-calculated at the beginning */
/* ref pic list */
AVCPictureData *RefPicList0[MAX_REF_PIC_LIST]; /* list 0 */
AVCPictureData *RefPicList1[MAX_REF_PIC_LIST]; /* list 1 */
AVCFrameStore *refFrameList0ShortTerm[32];
AVCFrameStore *refFrameList1ShortTerm[32];
AVCFrameStore *refFrameListLongTerm[32];
int refList0Size;
int refList1Size;
/* slice data semantics*/
int mb_skip_run; /* ue(v) */
/*uint mb_skip_flag;*/ /* ae(v) */
/* uint end_of_slice_flag;*//* ae(v) */
/***********************************************/
/* function pointers */
/* predicate hooks; presumably test whether s is a short-/long-term
   reference picture — installed by the enc/dec init code (verify there) */
int (*is_short_ref)(AVCPictureData *s);
int (*is_long_ref)(AVCPictureData *s);
} AVCCommonObj;
/**
Commonly used constant arrays.
@publishedAll
*/
/**
Zigzag scan from 1-D to 2-D. */
const static uint8 ZZ_SCAN[16] = {0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15};
/* Zigzag scan from 1-D to 2-D output to block[24][16]. */
const static uint8 ZZ_SCAN_BLOCK[16] = {0, 1, 16, 32, 17, 2, 3, 18, 33, 48, 49, 34, 19, 35, 50, 51};
/**
From zigzag to raster for luma DC value */
const static uint8 ZIGZAG2RASTERDC[16] = {0, 4, 64, 128, 68, 8, 12, 72, 132, 192, 196, 136, 76, 140, 200, 204};
/**
Mapping from coding scan block index to raster scan block index */
const static int blkIdx2blkX[16] = {0, 1, 0, 1, 2, 3, 2, 3, 0, 1, 0, 1, 2, 3, 2, 3};
const static int blkIdx2blkY[16] = {0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 3, 3, 2, 2, 3, 3};
/** from [blk8indx][blk4indx] to raster scan index */
const static int blkIdx2blkXY[4][4] = {{0, 1, 4, 5}, {2, 3, 6, 7}, {8, 9, 12, 13}, {10, 11, 14, 15}};
/*
Availability of the neighboring top-right block relative to the current block. */
const static int BlkTopRight[16] = {2, 2, 2, 3, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0};
/**
Table 8-13 Specification of QPc as a function of qPI. */
const static uint8 mapQPi2QPc[52] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 29, 30, 31, 32, 32, 33, 34, 34, 35, 35, 36, 36,
37, 37, 37, 38, 38, 38, 39, 39, 39, 39
};
/**
See 8.5.5 equation (8-252 and 8-253) the definition of v matrix. */
/* in zigzag scan */
const static int dequant_coefres[6][16] =
{
{10, 13, 13, 10, 16, 10, 13, 13, 13, 13, 16, 10, 16, 13, 13, 16},
{11, 14, 14, 11, 18, 11, 14, 14, 14, 14, 18, 11, 18, 14, 14, 18},
{13, 16, 16, 13, 20, 13, 16, 16, 16, 16, 20, 13, 20, 16, 16, 20},
{14, 18, 18, 14, 23, 14, 18, 18, 18, 18, 23, 14, 23, 18, 18, 23},
{16, 20, 20, 16, 25, 16, 20, 20, 20, 20, 25, 16, 25, 20, 20, 25},
{18, 23, 23, 18, 29, 18, 23, 23, 23, 23, 29, 18, 29, 23, 23, 29}
};
/**
From jm7.6 block.c. (in zigzag scan) */
const static int quant_coef[6][16] =
{
{13107, 8066, 8066, 13107, 5243, 13107, 8066, 8066, 8066, 8066, 5243, 13107, 5243, 8066, 8066, 5243},
{11916, 7490, 7490, 11916, 4660, 11916, 7490, 7490, 7490, 7490, 4660, 11916, 4660, 7490, 7490, 4660},
{10082, 6554, 6554, 10082, 4194, 10082, 6554, 6554, 6554, 6554, 4194, 10082, 4194, 6554, 6554, 4194},
{9362, 5825, 5825, 9362, 3647, 9362, 5825, 5825, 5825, 5825, 3647, 9362, 3647, 5825, 5825, 3647},
{8192, 5243, 5243, 8192, 3355, 8192, 5243, 5243, 5243, 5243, 3355, 8192, 3355, 5243, 5243, 3355},
{7282, 4559, 4559, 7282, 2893, 7282, 4559, 4559, 4559, 4559, 2893, 7282, 2893, 4559, 4559, 2893}
};
/**
Convert scan from raster scan order to block decoding order and
from block decoding order to raster scan order. Same table!!!
*/
const static uint8 ras2dec[16] = {0, 1, 4, 5, 2, 3, 6, 7, 8, 9, 12, 13, 10, 11, 14, 15};
/* mapping from level_idc to index map; 255 marks an undefined level_idc.
   NOTE(review): only 60 initializers are given for a [61] array, so element
   [60] is zero-initialized and would map level_idc 60 to index 0 (Level 1) —
   confirm callers validate level_idc before indexing. */
const static uint8 mapLev2Idx[61] = {255, 255, 255, 255, 255, 255, 255, 255, 255, 1,
0, 1, 2, 3, 255, 255, 255, 255, 255, 255,
4, 5, 6, 255, 255, 255, 255, 255, 255, 255,
7, 8, 9, 255, 255, 255, 255, 255, 255, 255,
10, 11, 12, 255, 255, 255, 255, 255, 255, 255,
13, 14, 255, 255, 255, 255, 255, 255, 255, 255
};
/* map back from index to Level IDC */
const static uint8 mapIdx2Lev[MAX_LEVEL_IDX] = {10, 11, 12, 13, 20, 21, 22, 30, 31, 32, 40, 41, 42, 50, 51};
/**
from the index map to the MaxDPB value times 2 */
const static int32 MaxDPBX2[MAX_LEVEL_IDX] = {297, 675, 1782, 1782, 1782, 3564, 6075, 6075,
13500, 15360, 24576, 24576, 24576, 82620, 138240
};
/* map index to the max frame size */
const static int MaxFS[MAX_LEVEL_IDX] = {99, 396, 396, 396, 396, 792, 1620, 1620, 3600, 5120,
8192, 8192, 8192, 22080, 36864
};
/* map index to max MB processing rate */
const static int32 MaxMBPS[MAX_LEVEL_IDX] = {1485, 3000, 6000, 11880, 11880, 19800, 20250, 40500,
108000, 216000, 245760, 245760, 491520, 589824, 983040
};
/* map index to max video bit rate */
const static uint32 MaxBR[MAX_LEVEL_IDX] = {64, 192, 384, 768, 2000, 4000, 4000, 10000, 14000, 20000,
20000, 50000, 50000, 135000, 240000
};
/* map index to max CPB size */
const static uint32 MaxCPB[MAX_LEVEL_IDX] = {175, 500, 1000, 2000, 2000, 4000, 4000, 10000, 14000,
20000, 25000, 62500, 62500, 135000, 240000
};
/* map index to max vertical MV range */
const static int MaxVmvR[MAX_LEVEL_IDX] = {64, 128, 128, 128, 128, 256, 256, 256, 512, 512, 512, 512, 512, 512, 512};
#endif /* _AVCINT_COMMON_H_ */
================================================
FILE: RtspCamera/jni/avc_h264/common/include/avclib_common.h
================================================
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/**
This file contains declarations of internal functions for common encoder/decoder library.
@publishedAll
*/
#ifndef AVCCOMMON_LIB_H_INCLUDED
#define AVCCOMMON_LIB_H_INCLUDED
#ifndef AVCINT_COMMON_H_INCLUDED
#include "avcint_common.h"
#endif
#ifndef OSCL_BASE_H_INCLUDED
#include "oscl_base.h"
#endif
/*----------- deblock.c --------------*/
/**
This function performs conditional deblocking on a complete picture.
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS for success and AVC_FAIL otherwise."
*/
OSCL_IMPORT_REF AVCStatus DeblockPicture(AVCCommonObj *video);
/**
This function performs MB-based deblocking when MB_BASED_DEBLOCK
is defined at compile time.
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS for success and AVC_FAIL otherwise."
*/
void MBInLoopDeblock(AVCCommonObj *video);
/*---------- dpb.c --------------------*/
/**
This function is called every time a new sequence is detected.
\param "avcHandle" "Pointer to AVCHandle."
\param "video" "Pointer to AVCCommonObj."
\param "padding" "Flag specifying whether padding in luma component is needed (used for encoding)."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus AVCConfigureSequence(AVCHandle *avcHandle, AVCCommonObj *video, bool padding);
/**
This function allocates and initializes the decoded picture buffer structure based on
the profile and level for the first sequence parameter set. Currently,
it does not allow changing in profile/level for subsequent SPS.
\param "avcHandle" "Pointer to AVCHandle."
\param "video" "Pointer to AVCCommonObj."
\param "FrameHeightInMbs" "Height of the frame in the unit of MBs."
\param "PicWidthInMbs" "Width of the picture in the unit of MBs."
\param "padding" "Flag specifying whether padding in luma component is needed (used for encoding)."
\return "AVC_SUCCESS or AVC_FAIL."
*/
AVCStatus InitDPB(AVCHandle *avcHandle, AVCCommonObj *video, int FrameHeightInMbs, int PicWidthInMbs, bool padding);
/**
This function frees the DPB memory.
\param "avcHandle" "Pointer to AVCHandle."
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus CleanUpDPB(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function finds empty frame in the decoded picture buffer to be used for the
current picture, initializes the corresponding picture structure with Sl, Scb, Scr,
width, height and pitch.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus DPBInitBuffer(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function finds empty frame in the decoded picture buffer to be used for the
current picture, initializes the corresponding picture structure with Sl, Scb, Scr,
width, height and pitch.
\param "video" "Pointer to AVCCommonObj."
\param "CurrPicNum" "Current picture number (only used in decoder)."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF void DPBInitPic(AVCCommonObj *video, int CurrPicNum);
/**
This function releases the current frame back to the available pool for skipped frame after encoding.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\return "void."
*/
OSCL_IMPORT_REF void DPBReleaseCurrentFrame(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function performs decoded reference picture marking process and store the current picture to the
corresponding frame storage in the decoded picture buffer.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL."
*/
OSCL_IMPORT_REF AVCStatus StorePictureInDPB(AVCHandle *avcHandle, AVCCommonObj *video);
/**
This function performs the sliding-window operation on the reference picture lists, see subclause 8.2.5.3.
It removes short-term ref frames with smallest FrameNumWrap from the reference list.
\param "avcHandle" "Pointer to the main handle object."
\param "video" "Pointer to the AVCCommonObj."
\param "dpb" "Pointer to the AVCDecPicBuffer."
\return "AVC_SUCCESS or AVC_FAIL (contradicting values or scenario as in the Note in the draft)."
*/
AVCStatus sliding_window_process(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb);
/**
This function performs the adaptive memory marking operation on the reference picture lists,
see subclause 8.2.5.4. It calls other functions for specific operations.
\param "video" "Pointer to the AVCCommonObj."
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "sliceHdr" "Pointer to the AVCSliceHeader."
\return "AVC_SUCCESS or AVC_FAIL (contradicting values or scenario as in the Note in the draft)."
*/
AVCStatus adaptive_memory_marking(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, AVCSliceHeader *sliceHdr);
/**
This function performs memory management control operation 1, marking a short-term picture
as unused for reference. See subclause 8.2.5.4.1.
\param "video" "Pointer to the AVCCommonObj."
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "difference_of_pic_nums_minus1" "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp1(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, int difference_of_pic_nums_minus1);
/**
This function performs memory management control operation 2, marking a long-term picture
as unused for reference. See subclause 8.2.5.4.2.
\param "avcHandle" "Pointer to the main handle object."
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "long_term_pic_num" "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp2(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, int long_term_pic_num);
/**
This function performs memory management control operation 3, assigning a LongTermFrameIdx to
a short-term reference picture. See subclause 8.2.5.4.3.
\param "video" "Pointer to the AVCCommonObj."
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "difference_of_pic_nums_minus1" "From the syntax in dec_ref_pic_marking()."
\param "long_term_frame_idx" "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp3(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint difference_of_pic_nums_minus1,
uint long_term_frame_idx);
/**
This function performs memory management control operation 4, getting new MaxLongTermFrameIdx.
See subclause 8.2.5.4.4.
\param "video" "Pointer to the AVCCommonObj."
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "max_long_term_frame_idx_plus1" "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp4(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint max_long_term_frame_idx_plus1);
/**
This function performs memory management control operation 5, marking all reference pictures
as unused for reference and set MaxLongTermFrameIdx to no long-term frame indices.
See subclause 8.2.5.4.5.
\param "video" "Pointer to the AVCCommonObj."
\param "dpb" "Pointer to the AVCDecPicBuffer."
*/
void MemMgrCtrlOp5(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb);
/**
This function performs memory management control operation 6, assigning a long-term frame index
to the current picture. See subclause 8.2.5.4.6.
\param "video" "Pointer to the AVCCommonObj."
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "long_term_frame_idx" "From the syntax in dec_ref_pic_marking()."
*/
void MemMgrCtrlOp6(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint long_term_frame_idx);
/**
This function mark a long-term ref frame with a specific frame index as unused for reference.
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "long_term_frame_idx" "To look for"
*/
void unmark_long_term_frame_for_reference_by_frame_idx(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint long_term_frame_idx);
/**
This function mark a long-term ref field with a specific frame index as unused for reference except
a frame that contains a picture with picNumX.
\param "dpb" "Pointer to the AVCDecPicBuffer."
\param "long_term_frame_idx" "To look for."
\param "picNumX" "To look for."
*/
void unmark_long_term_field_for_reference_by_frame_idx(AVCCommonObj *video, AVCDecPicBuffer *dpb, uint long_term_frame_indx, int picNumX);
/**
This function mark a frame to unused for reference.
\param "fs" "Pointer to AVCFrameStore to be unmarked."
*/
void unmark_for_reference(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint idx);
void update_ref_list(AVCDecPicBuffer *dpb);
/*---------- fmo.c --------------*/
/**
This function initializes flexible macroblock reordering.
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS for success and AVC_FAIL otherwise."
*/
OSCL_IMPORT_REF AVCStatus FMOInit(AVCCommonObj *video);
/**
This function fills up an array that maps Map unit to the slice group
following the interleaved slice group map type.
\param "mapUnitToSliceGroupMap" "Array of slice group mapping."
\param "run_length_minus1" "Array of the run-length."
\param "num_slice_groups_minus_1" "Number of slice group minus 1."
\param "PicSizeInMapUnit" "Size of the picture in number Map units."
\return "Void."
*/
void FmoGenerateType0MapUnitMap(int *mapUnitToSliceGroupMap, uint *run_length_minus1, uint num_slice_groups_minus1, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the dispersed slice group map type.
\param "mapUnitToSliceGroupMap" "Array of slice group mapping."
\param "PicWidthInMbs" "Width of the luma picture in macroblock unit."
\param "num_slice_groups_minus_1" "Number of slice group minus 1."
\param "PicSizeInMapUnit" "Size of the picture in number Map units."
\return "Void."
*/
void FmoGenerateType1MapUnitMap(int *mapUnitToSliceGroupMap, int PicWidthInMbs, uint num_slice_groups_minus1, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the foreground with left-over slice group map type.
\param "pps" "Pointer to AVCPicParamSets structure."
\param "mapUnitToSliceGroupMap" "Array of slice group mapping."
\param "PicWidthInMbs" "Width of the luma picture in macroblock unit."
\param "num_slice_groups_minus_1" "Number of slice group minus 1."
\param "PicSizeInMapUnit" "Size of the picture in number Map units."
\return "Void."
*/
void FmoGenerateType2MapUnitMap(AVCPicParamSet *pps, int *mapUnitToSliceGroupMap, int PicWidthInMbs,
uint num_slice_groups_minus1, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the box-out slice group map type.
\param "pps" "Pointer to AVCPicParamSets structure."
\param "mapUnitToSliceGroupMap" "Array of slice group mapping."
\param "PicWidthInMbs" "Width of the luma picture in macroblock unit."
\return "Void."
*/
void FmoGenerateType3MapUnitMap(AVCCommonObj *video, AVCPicParamSet* pps, int *mapUnitToSliceGroupMap,
int PicWidthInMbs);
/**
This function fills up an array that maps Map unit to the slice group
following the raster scan slice group map type.
\param "mapUnitToSliceGroupMap" "Array of slice group mapping."
\param "MapUnitsInSliceGroup0" "Derived in subclause 7.4.3."
\param "slice_group_change_direction_flag" "A value from the slice header."
\param "PicSizeInMapUnit" "Size of the picture in number Map units."
\return "void"
*/
void FmoGenerateType4MapUnitMap(int *mapUnitToSliceGroupMap, int MapUnitsInSliceGroup0,
int slice_group_change_direction_flag, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following wipe slice group map type.
\param "mapUnitToSliceGroupMap" "Array of slice group mapping."
\param "video" "Pointer to AVCCommonObj structure."
\param "slice_group_change_direction_flag" "A value from the slice header."
\param "PicSizeInMapUnit" "Size of the picture in number Map units."
\return "void"
*/
void FmoGenerateType5MapUnitMap(int *mapUnitsToSliceGroupMap, AVCCommonObj *video,
int slice_group_change_direction_flag, uint PicSizeInMapUnits);
/**
This function fills up an array that maps Map unit to the slice group
following the explicit slice group map type.
\param "mapUnitToSliceGroupMap" "Array of slice group mapping."
\param "slice_group_id" "Array of slice_group_id from AVCPicParamSet structure."
\param "PicSizeInMapUnit" "Size of the picture in number Map units."
\return "void"
*/
void FmoGenerateType6MapUnitMap(int *mapUnitsToSliceGroupMap, int *slice_group_id, uint PicSizeInMapUnits);
/*------------- itrans.c --------------*/
/**
This function performs transformation of the Intra16x16DC value according to
subclause 8.5.6.
\param "block" "Pointer to the video->block[0][0][0]."
\param "Qq" "Quantization parameter component (derived from QPy)."
\param "Rq" "Quantization parameter component (derived from QPy)."
\return "void."
*/
void Intra16DCTrans(int16 *block, int Qq, int Rq);
/**
This function performs transformation of a 4x4 block according to
subclause 8.5.8.
\param "block" "Pointer to the origin of transform coefficient area."
\param "pred" "Pointer to the origin of predicted area."
\param "cur" "Pointer to the origin of the output area."
\param "width" "Pitch of cur."
\return "void."
*/
void itrans(int16 *block, uint8 *pred, uint8 *cur, int width);
/**
This function is the same one as itrans except for chroma.
\param "block" "Pointer to the origin of transform coefficient area."
\param "pred" "Pointer to the origin of predicted area."
\param "cur" "Pointer to the origin of the output area."
\param "width" "Pitch of cur."
\return "void."
*/
void ictrans(int16 *block, uint8 *pred, uint8 *cur, int width);
/**
This function performs transformation of the DCChroma value according to
subclause 8.5.7.
\param "block" "Pointer to the video->block[0][0][0]."
\param "Qq" "Quantization parameter component (derived from QPc)."
\param "Rq" "Quantization parameter component (derived from QPc)."
\return "void."
*/
void ChromaDCTrans(int16 *block, int Qq, int Rq);
/**
This function copies a block from pred to cur.
\param "pred" "Pointer to prediction block."
\param "cur" "Pointer to the current YUV block."
\param "width" "Pitch of cur memory."
\param "pred_pitch" "Pitch for pred memory."
\return "void."
*/
void copy_block(uint8 *pred, uint8 *cur, int width, int pred_pitch);
/*--------- mb_access.c ----------------*/
/**
This function initializes the neighboring information before start macroblock decoding.
\param "video" "Pointer to AVCCommonObj."
\param "mbNum" "The current macroblock index."
\return "void"
*/
OSCL_IMPORT_REF void InitNeighborAvailability(AVCCommonObj *video, int mbNum);
/**
This function checks whether the requested neighboring macroblock is available.
\param "mblock" "Pointer to the array of AVCMacroblock structures."
\param "PicSizeInMbs" "Size of the picture in number of MBs."
\param "mbAddr" "Neighboring macroblock index to check."
\param "currMbAddr" "Current macroblock index."
\return "TRUE if the neighboring MB is available, FALSE otherwise."
*/
bool mb_is_available(AVCMacroblock *mblock, uint PicSizeInMbs, int mbAddr, int currMbAddr);
/**
This function performs prediction of the nonzero coefficient for a luma block (i,j).
\param "video" "Pointer to AVCCommonObj."
\param "i" "Block index, horizontal."
\param "j" "Block index, vertical."
\return "Predicted number of nonzero coefficient."
*/
OSCL_IMPORT_REF int predict_nnz(AVCCommonObj *video, int i, int j);
/**
This function performs prediction of the nonzero coefficient for a chroma block (i,j).
\param "video" "Pointer to AVCCommonObj."
\param "i" "Block index, horizontal."
\param "j" "Block index, vertical."
\return "Predicted number of nonzero coefficient."
*/
OSCL_IMPORT_REF int predict_nnz_chroma(AVCCommonObj *video, int i, int j);
/**
This function calculates the predicted motion vectors for the current macroblock.
\param "video" "Pointer to AVCCommonObj."
\param "encFlag" "Boolean whether this function is used by encoder or decoder."
\return "void."
*/
OSCL_IMPORT_REF void GetMotionVectorPredictor(AVCCommonObj *video, int encFlag);
/*---------- reflist.c -----------------*/
/**
This function initializes reference picture list used in INTER prediction
at the beginning of each slice decoding. See subclause 8.2.4.
\param "video" "Pointer to AVCCommonObj."
\return "void"
Output is video->RefPicList0, video->RefPicList1, video->refList0Size and video->refList1Size.
*/
OSCL_IMPORT_REF void RefListInit(AVCCommonObj *video);
/**
This function generates picture list from frame list. Used when current picture is field.
see subclause 8.2.4.2.5.
\param "video" "Pointer to AVCCommonObj."
\param "IsL1" "Is L1 list?"
\param "long_term" "Is long-term prediction?"
\return "void"
*/
void GenPicListFromFrameList(AVCCommonObj *video, int IsL1, int long_term);
/**
This function performs reference picture list reordering according to the
ref_pic_list_reordering() syntax. See subclause 8.2.4.3.
\param "video" "Pointer to AVCCommonObj."
\return "AVC_SUCCESS or AVC_FAIL"
Output is video->RefPicList0, video->RefPicList1, video->refList0Size and video->refList1Size.
*/
OSCL_IMPORT_REF AVCStatus ReOrderList(AVCCommonObj *video);
/**
This function performs reference picture list reordering according to the
ref_pic_list_reordering() syntax regardless of list 0 or list 1. See subclause 8.2.4.3.
\param "video" "Pointer to AVCCommonObj."
\param "isL1" "Is list 1 or not."
\return "AVC_SUCCESS or AVC_FAIL"
Output is video->RefPicList0 and video->refList0Size or video->RefPicList1 and video->refList1Size.
*/
AVCStatus ReorderRefPicList(AVCCommonObj *video, int isL1);
/**
This function performs reordering process of reference picture list for short-term pictures.
See subclause 8.2.4.3.1.
\param "video" "Pointer to AVCCommonObj."
\param "picNumLX" "picNumLX of an entry in the reference list."
\param "refIdxLX" "Pointer to the current entry index in the reference."
\param "isL1" "Is list 1 or not."
\return "AVC_SUCCESS or AVC_FAIL"
*/
AVCStatus ReorderShortTerm(AVCCommonObj *video, int picNumLX, int *refIdxLX, int isL1);
/**
This function performs reordering process of reference picture list for long-term pictures.
See subclause 8.2.4.3.2.
\param "video" "Pointer to AVCCommonObj."
\param "LongTermPicNum" "LongTermPicNum of an entry in the reference list."
\param "refIdxLX" "Pointer to the current entry index in the reference."
\param "isL1" "Is list 1 or not."
\return "AVC_SUCCESS or AVC_FAIL"
*/
AVCStatus ReorderLongTerm(AVCCommonObj *video, int LongTermPicNum, int *refIdxLX, int isL1);
/**
This function gets the pictures in DPB according to the PicNum.
\param "video" "Pointer to AVCCommonObj."
\param "picNum" "PicNum of the picture we are looking for."
\return "Pointer to the AVCPictureData or NULL if not found"
*/
AVCPictureData* GetShortTermPic(AVCCommonObj *video, int picNum);
/**
This function gets the pictures in DPB according to the LongtermPicNum.
\param "video" "Pointer to AVCCommonObj."
\param "LongtermPicNum" "LongtermPicNum of the picture we are looking for."
\return "Pointer to the AVCPictureData."
*/
AVCPictureData* GetLongTermPic(AVCCommonObj *video, int LongtermPicNum);
/**
This function indicates whether the picture is used for short-term reference or not.
\param "s" "Pointer to AVCPictureData."
\return "1 if it is used for short-term, 0 otherwise."
*/
int is_short_ref(AVCPictureData *s);
/**
This function indicates whether the picture is used for long-term reference or not.
\param "s" "Pointer to AVCPictureData."
\return "1 if it is used for long-term, 0 otherwise."
*/
int is_long_ref(AVCPictureData *s);
/**
This function sorts array of pointers to AVCPictureData in descending order of
the PicNum value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByPicNum(AVCPictureData *data[], int num);
/**
This function sorts array of pointers to AVCPictureData in ascending order of
the PicNum value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByPicNumLongTerm(AVCPictureData *data[], int num);
/**
This function sorts array of pointers to AVCFrameStore in descending order of
the FrameNumWrap value.
\param "data" "Array of pointers to AVCFrameStore."
\param "num" "Size of the array."
\return "void"
*/
void SortFrameByFrameNumWrap(AVCFrameStore *data[], int num);
/**
This function sorts array of pointers to AVCFrameStore in ascending order of
the LongTermFrameIdx value.
\param "data" "Array of pointers to AVCFrameStore."
\param "num" "Size of the array."
\return "void"
*/
void SortFrameByLTFrameIdx(AVCFrameStore *data[], int num);
/**
This function sorts array of pointers to AVCPictureData in descending order of
the PicOrderCnt value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByPOC(AVCPictureData *data[], int num, int descending);
/**
This function sorts array of pointers to AVCPictureData in ascending order of
the LongTermPicNum value.
\param "data" "Array of pointers to AVCPictureData."
\param "num" "Size of the array."
\return "void"
*/
void SortPicByLTPicNum(AVCPictureData *data[], int num);
/**
This function sorts array of pointers to AVCFrameStore in descending order of
the PicOrderCnt value.
\param "data" "Array of pointers to AVCFrameStore."
\param "num" "Size of the array."
\return "void"
*/
void SortFrameByPOC(AVCFrameStore *data[], int num, int descending);
#endif /* _AVCCOMMON_LIB_H_ */
================================================
FILE: RtspCamera/jni/avc_h264/common/src/deblock.cpp
================================================
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avclib_common.h"
#include "oscl_mem.h"
#define MAX_QP 51
#define MB_BLOCK_SIZE 16
// NOTE: this table is for function GetStrength() only
const static int ININT_STRENGTH[4] = {0x04040404, 0x03030303, 0x03030303, 0x03030303};
// NOTE: these 3 tables are for function EdgeLoop() only
// NOTE: to change the tables below for instance when the QP doubling is changed from 6 to 8 values
const static int ALPHA_TABLE[52] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 5, 6, 7, 8, 9, 10, 12, 13, 15, 17, 20, 22, 25, 28, 32, 36, 40, 45, 50, 56, 63, 71, 80, 90, 101, 113, 127, 144, 162, 182, 203, 226, 255, 255} ;
const static int BETA_TABLE[52] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17, 18, 18} ;
const static int CLIP_TAB[52][5] =
{
{ 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0},
{ 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0}, { 0, 0, 0, 0, 0},
{ 0, 0, 0, 0, 0}, { 0, 0, 0, 1, 1}, { 0, 0, 0, 1, 1}, { 0, 0, 0, 1, 1}, { 0, 0, 0, 1, 1}, { 0, 0, 1, 1, 1}, { 0, 0, 1, 1, 1}, { 0, 1, 1, 1, 1},
{ 0, 1, 1, 1, 1}, { 0, 1, 1, 1, 1}, { 0, 1, 1, 1, 1}, { 0, 1, 1, 2, 2}, { 0, 1, 1, 2, 2}, { 0, 1, 1, 2, 2}, { 0, 1, 1, 2, 2}, { 0, 1, 2, 3, 3},
{ 0, 1, 2, 3, 3}, { 0, 2, 2, 3, 3}, { 0, 2, 2, 4, 4}, { 0, 2, 3, 4, 4}, { 0, 2, 3, 4, 4}, { 0, 3, 3, 5, 5}, { 0, 3, 4, 6, 6}, { 0, 3, 4, 6, 6},
{ 0, 4, 5, 7, 7}, { 0, 4, 5, 8, 8}, { 0, 4, 6, 9, 9}, { 0, 5, 7, 10, 10}, { 0, 6, 8, 11, 11}, { 0, 6, 8, 13, 13}, { 0, 7, 10, 14, 14}, { 0, 8, 11, 16, 16},
{ 0, 9, 12, 18, 18}, { 0, 10, 13, 20, 20}, { 0, 11, 15, 23, 23}, { 0, 13, 17, 25, 25}
};
// NOTE: this table is only QP clipping, index = QP + video->FilterOffsetA/B, clipped to [0, 51]
// video->FilterOffsetA/B is in [-12, 12]
const static int QP_CLIP_TAB[76] =
{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // [-12, 0]
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18, 19, 20, 21,
22, 23, 24, 25, 26, 27, 28, 29, 30,
31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, // [1, 51]
51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51 // [52,63]
};
static void DeblockMb(AVCCommonObj *video, int mb_x, int mb_y, uint8 *SrcY, uint8 *SrcU, uint8 *SrcV);
//static void GetStrength(AVCCommonObj *video, uint8 *Strength, AVCMacroblock* MbP, AVCMacroblock* MbQ, int dir, int edge);
static void GetStrength_Edge0(uint8 *Strength, AVCMacroblock* MbP, AVCMacroblock* MbQ, int dir);
static void GetStrength_VerticalEdges(uint8 *Strength, AVCMacroblock* MbQ);
static void GetStrength_HorizontalEdges(uint8 Strength[12], AVCMacroblock* MbQ);
static void EdgeLoop_Luma_vertical(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch);
static void EdgeLoop_Luma_horizontal(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch);
static void EdgeLoop_Chroma_vertical(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch);
static void EdgeLoop_Chroma_horizontal(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch);
/*
*****************************************************************************************
* \brief Filter all macroblocks in order of increasing macroblock address.
*****************************************************************************************
*/
OSCL_EXPORT_REF AVCStatus DeblockPicture(AVCCommonObj *video)
{
    uint mb_row, mb_col;
    int pitch = video->currPic->pitch;      /* luma pitch */
    int pitch_c = pitch >> 1;               /* chroma pitch (4:2:0) */
    int width = video->currPic->width;
    uint8 *SrcY = video->currPic->Sl;       /* plane origins */
    uint8 *SrcU = video->currPic->Scb;
    uint8 *SrcV = video->currPic->Scr;

    /* Walk the macroblocks in raster order; DeblockMb filters one MB in place. */
    for (mb_row = 0; mb_row < video->PicHeightInMbs; mb_row++)
    {
        for (mb_col = 0; mb_col < video->PicWidthInMbs; mb_col++)
        {
            DeblockMb(video, mb_col, mb_row, SrcY, SrcU, SrcV);
            /* step one macroblock to the right in each plane */
            SrcY += MB_BLOCK_SIZE;
            SrcU += (MB_BLOCK_SIZE >> 1);
            SrcV += (MB_BLOCK_SIZE >> 1);
        }
        /* move down one MB row: 16 luma lines (8 chroma) minus the width already walked */
        SrcY += ((pitch << 4) - width);
        SrcU += ((pitch_c << 3) - (width >> 1));
        SrcV += ((pitch_c << 3) - (width >> 1));
    }
    return AVC_SUCCESS;
}
#ifdef MB_BASED_DEBLOCK
/*
*****************************************************************************************
* \brief Filter one macroblocks in a fast macroblock memory and copy it to frame
*****************************************************************************************
*/
void MBInLoopDeblock(AVCCommonObj *video)
{
    /* Deblock the current macroblock (video->mb_x, video->mb_y).
       With USE_PRED_BLOCK defined, the MB plus a 4-pixel border is staged in the
       small contiguous buffer video->pred (layout below), filtered there, then
       copied back to the frame; otherwise the frame is filtered in place.
       pred layout: 20x20 luma (400 bytes), then 12x12 Cb (144 bytes) at offset
       400, then 12x12 Cr at offset 544 — each with a 4-pixel left/top border. */
    AVCPictureData *currPic = video->currPic;
#ifdef USE_PRED_BLOCK
    uint8 *predCb, *predCr, *pred_block;
    int i, j, dst_width, dst_height, dst_widthc, dst_heightc;
#endif
    int pitch = currPic->pitch;
    int x_pos = video->mb_x;
    int y_pos = video->mb_y;
    uint8 *curL, *curCb, *curCr;
    int offset;
    /* frame addresses of the current MB origin in each plane */
    offset = (y_pos << 4) * pitch;
    curL = currPic->Sl + offset + (x_pos << 4);
    offset >>= 2; /* chroma: half pitch, half MB size => offset/4 */
    offset += (x_pos << 3);
    curCb = currPic->Scb + offset;
    curCr = currPic->Scr + offset;
#ifdef USE_PRED_BLOCK
    pred_block = video->pred;
    /* 1. copy neighboring pixels from frame to the video->pred_block */
    if (y_pos) /* not the 0th row */
    {
        /* copy to the top 4 lines of the macroblock */
        curL -= (pitch << 2); /* go back 4 lines */
        oscl_memcpy(pred_block + 4, curL, 16);
        curL += pitch;
        oscl_memcpy(pred_block + 24, curL, 16);
        curL += pitch;
        oscl_memcpy(pred_block + 44, curL, 16);
        curL += pitch;
        oscl_memcpy(pred_block + 64, curL, 16);
        curL += pitch;
        curCb -= (pitch << 1); /* go back 4 lines chroma (chroma pitch is pitch/2) */
        curCr -= (pitch << 1);
        pred_block += 400; /* start of the Cb sub-buffer */
        oscl_memcpy(pred_block + 4, curCb, 8);
        curCb += (pitch >> 1);
        oscl_memcpy(pred_block + 16, curCb, 8);
        curCb += (pitch >> 1);
        oscl_memcpy(pred_block + 28, curCb, 8);
        curCb += (pitch >> 1);
        oscl_memcpy(pred_block + 40, curCb, 8);
        curCb += (pitch >> 1);
        pred_block += 144; /* start of the Cr sub-buffer */
        oscl_memcpy(pred_block + 4, curCr, 8);
        curCr += (pitch >> 1);
        oscl_memcpy(pred_block + 16, curCr, 8);
        curCr += (pitch >> 1);
        oscl_memcpy(pred_block + 28, curCr, 8);
        curCr += (pitch >> 1);
        oscl_memcpy(pred_block + 40, curCr, 8);
        curCr += (pitch >> 1);
        pred_block = video->pred;
    }
    /* 2. perform deblocking. */
    /* offsets skip the 4-pixel border: luma 4*20+4=84, Cb 400+4*12+4=452, Cr 544+52=596 */
    DeblockMb(video, x_pos, y_pos, pred_block + 84, pred_block + 452, pred_block + 596);
    /* 3. copy it back to the frame and update pred_block */
    predCb = pred_block + 400;
    predCr = predCb + 144;
    /* find the range of the block inside pred_block to be copied back */
    if (y_pos) /* not the first row: the 4 borrowed top lines must be written back too */
    {
        curL -= (pitch << 2);
        curCb -= (pitch << 1);
        curCr -= (pitch << 1);
        dst_height = 20;
        dst_heightc = 12;
    }
    else
    {
        /* first row: skip the (unused) 4-line border in the staging buffer */
        pred_block += 80;
        predCb += 48;
        predCr += 48;
        dst_height = 16;
        dst_heightc = 8;
    }
    if (x_pos) /* find the width */
    {
        /* include the 4 borrowed left columns */
        curL -= 4;
        curCb -= 4;
        curCr -= 4;
        if (x_pos == (int)(video->PicWidthInMbs - 1))
        {
            /* last MB of the row: no right neighbor will re-copy these pixels */
            dst_width = 20;
            dst_widthc = 12;
        }
        else
        {
            dst_width = 16;
            dst_widthc = 8;
        }
    }
    else
    {
        /* first column: skip the 4-column border, and keep the rightmost 4
           columns for the next MB (copied-over below) */
        pred_block += 4;
        predCb += 4;
        predCr += 4;
        dst_width = 12;
        dst_widthc = 4;
    }
    /* perform copy */
    for (j = 0; j < dst_height; j++)
    {
        oscl_memcpy(curL, pred_block, dst_width);
        curL += pitch;
        pred_block += 20;
    }
    for (j = 0; j < dst_heightc; j++)
    {
        oscl_memcpy(curCb, predCb, dst_widthc);
        oscl_memcpy(curCr, predCr, dst_widthc);
        curCb += (pitch >> 1);
        curCr += (pitch >> 1);
        predCb += 12;
        predCr += 12;
    }
    if (x_pos != (int)(video->PicWidthInMbs - 1)) /* now copy from the right-most 4 columns to the left-most 4 columns */
    {
        /* slide the staging window right: the next MB's left border is this
           MB's right edge; 20 luma rows then 24 chroma rows (Cb+Cr) */
        pred_block = video->pred;
        for (i = 0; i < 20; i += 4)
        {
            *((uint32*)pred_block) = *((uint32*)(pred_block + 16));
            pred_block += 20;
            *((uint32*)pred_block) = *((uint32*)(pred_block + 16));
            pred_block += 20;
            *((uint32*)pred_block) = *((uint32*)(pred_block + 16));
            pred_block += 20;
            *((uint32*)pred_block) = *((uint32*)(pred_block + 16));
            pred_block += 20;
        }
        for (i = 0; i < 24; i += 4)
        {
            *((uint32*)pred_block) = *((uint32*)(pred_block + 8));
            pred_block += 12;
            *((uint32*)pred_block) = *((uint32*)(pred_block + 8));
            pred_block += 12;
            *((uint32*)pred_block) = *((uint32*)(pred_block + 8));
            pred_block += 12;
            *((uint32*)pred_block) = *((uint32*)(pred_block + 8));
            pred_block += 12;
        }
    }
#else
    DeblockMb(video, x_pos, y_pos, curL, curCb, curCr);
#endif
    return ;
}
#endif
/*
*****************************************************************************************
* \brief Deblocking filter for one macroblock.
*****************************************************************************************
*/
void DeblockMb(AVCCommonObj *video, int mb_x, int mb_y, uint8 *SrcY, uint8 *SrcU, uint8 *SrcV)
{
    /* Apply the in-loop deblocking filter (subclause 8.7) to one macroblock:
       vertical MB-boundary edge, internal vertical edges, horizontal
       MB-boundary edge, internal horizontal edges — in that order.
       SrcY/SrcU/SrcV point at the MB origin in each plane. */
    AVCMacroblock *MbP, *MbQ;
    int edge, QP, QPC;
    int filterLeftMbEdgeFlag = (mb_x != 0);
    int filterTopMbEdgeFlag = (mb_y != 0);
    int pitch = video->currPic->pitch;
    int indexA, indexB;
    int Alpha, Beta, Alpha_c, Beta_c;
    int Alpha_y, Beta_y;            /* luma thresholds saved for the internal edges */
    int mbNum = mb_y * video->PicWidthInMbs + mb_x;
    int *clipTable, *clipTable_y, *clipTable_c, *qp_clip_tab;
    uint8 Strength[16];
    void* str;
    MbQ = &(video->mblock[mbNum]); // current Mb
    // If filter is disabled, return
    if (video->sliceHdr->disable_deblocking_filter_idc == 1) return;
    if (video->sliceHdr->disable_deblocking_filter_idc == 2)
    {
        // don't filter at slice boundaries
        filterLeftMbEdgeFlag = mb_is_available(video->mblock, video->PicSizeInMbs, mbNum - 1, mbNum);
        filterTopMbEdgeFlag = mb_is_available(video->mblock, video->PicSizeInMbs, mbNum - video->PicWidthInMbs, mbNum);
    }
    /* NOTE: edge=0 and edge=1~3 are separate cases because of the difference of MbP, index A and indexB calculation */
    /* for edge = 1~3, MbP, indexA and indexB remain the same, and thus there is no need to re-calculate them for each edge */
    qp_clip_tab = (int *)QP_CLIP_TAB + 12;
    /* 1.VERTICAL EDGE + MB BOUNDARY (edge = 0) */
    if (filterLeftMbEdgeFlag)
    {
        MbP = MbQ - 1;
        //GetStrength(video, Strength, MbP, MbQ, 0, 0); // Strength for 4 blks in 1 stripe, 0 => vertical edge
        GetStrength_Edge0(Strength, MbP, MbQ, 0);
        str = (void*)Strength;  //de-ref type-punned pointer fix
        if (*((uint32*)str))    // only if one of the 4 Strength bytes is != 0
        {
            QP = (MbP->QPy + MbQ->QPy + 1) >> 1; // Average QP of the two blocks;
            indexA = QP + video->FilterOffsetA;
            indexB = QP + video->FilterOffsetB;
            indexA = qp_clip_tab[indexA]; // IClip(0, MAX_QP, QP+video->FilterOffsetA)
            indexB = qp_clip_tab[indexB]; // IClip(0, MAX_QP, QP+video->FilterOffsetB)
            Alpha = ALPHA_TABLE[indexA];
            Beta = BETA_TABLE[indexB];
            clipTable = (int *) CLIP_TAB[indexA];
            if (Alpha > 0 && Beta > 0)
#ifdef USE_PRED_BLOCK
                EdgeLoop_Luma_vertical(SrcY, Strength, Alpha, Beta, clipTable, 20);
#else
                EdgeLoop_Luma_vertical(SrcY, Strength, Alpha, Beta, clipTable, pitch);
#endif
            QPC = (MbP->QPc + MbQ->QPc + 1) >> 1;
            indexA = QPC + video->FilterOffsetA;
            indexB = QPC + video->FilterOffsetB;
            indexA = qp_clip_tab[indexA]; // IClip(0, MAX_QP, QP+video->FilterOffsetA)
            indexB = qp_clip_tab[indexB]; // IClip(0, MAX_QP, QP+video->FilterOffsetB)
            Alpha = ALPHA_TABLE[indexA];
            Beta = BETA_TABLE[indexB];
            clipTable = (int *) CLIP_TAB[indexA];
            if (Alpha > 0 && Beta > 0)
            {
#ifdef USE_PRED_BLOCK
                EdgeLoop_Chroma_vertical(SrcU, Strength, Alpha, Beta, clipTable, 12);
                EdgeLoop_Chroma_vertical(SrcV, Strength, Alpha, Beta, clipTable, 12);
#else
                EdgeLoop_Chroma_vertical(SrcU, Strength, Alpha, Beta, clipTable, pitch >> 1);
                EdgeLoop_Chroma_vertical(SrcV, Strength, Alpha, Beta, clipTable, pitch >> 1);
#endif
            }
        }
    } /* end of: if(filterLeftMbEdgeFlag) */
    /* 2.VERTICAL EDGE (no boundary), the edges are all inside a MB */
    /* First calculate the necesary parameters all at once, outside the loop */
    MbP = MbQ;
    indexA = MbQ->QPy + video->FilterOffsetA;
    indexB = MbQ->QPy + video->FilterOffsetB;
    // index
    indexA = qp_clip_tab[indexA]; // IClip(0, MAX_QP, QP+video->FilterOffsetA)
    indexB = qp_clip_tab[indexB]; // IClip(0, MAX_QP, QP+video->FilterOffsetB)
    Alpha = ALPHA_TABLE[indexA];
    Beta = BETA_TABLE[indexB];
    clipTable = (int *)CLIP_TAB[indexA];
    /* Save the luma Alpha, Beta and clipTable for the internal horizontal edges.
       NOTE: the original code stashed these in the obsolete variables
       filterLeftMbEdgeFlag/mbNum and cast clipTable through an int ("tmp"),
       which truncates the pointer on 64-bit platforms; properly typed
       dedicated variables are used instead. */
    Alpha_y = Alpha;
    Beta_y = Beta;
    clipTable_y = clipTable;
    indexA = MbQ->QPc + video->FilterOffsetA;
    indexB = MbQ->QPc + video->FilterOffsetB;
    indexA = qp_clip_tab[indexA]; // IClip(0, MAX_QP, QP+video->FilterOffsetA)
    indexB = qp_clip_tab[indexB]; // IClip(0, MAX_QP, QP+video->FilterOffsetB)
    Alpha_c = ALPHA_TABLE[indexA];
    Beta_c = BETA_TABLE[indexB];
    clipTable_c = (int *)CLIP_TAB[indexA];
    GetStrength_VerticalEdges(Strength + 4, MbQ); // Strength for 4 blks in 1 stripe, 0 => vertical edge
    for (edge = 1; edge < 4; edge++) // 4 vertical strips of 16 pel
    {
        //GetStrength_VerticalEdges(video, Strength, MbP, MbQ, 0, edge); // Strength for 4 blks in 1 stripe, 0 => vertical edge
        if (*((int*)(Strength + (edge << 2)))) // only if one of the 4 Strength bytes is != 0
        {
            if (Alpha > 0 && Beta > 0)
#ifdef USE_PRED_BLOCK
                EdgeLoop_Luma_vertical(SrcY + (edge << 2), Strength + (edge << 2), Alpha, Beta, clipTable, 20);
#else
                EdgeLoop_Luma_vertical(SrcY + (edge << 2), Strength + (edge << 2), Alpha, Beta, clipTable, pitch);
#endif
            /* chroma has only one internal edge (at edge==2, i.e. luma edge 8 / chroma edge 4) */
            if (!(edge & 1) && Alpha_c > 0 && Beta_c > 0)
            {
#ifdef USE_PRED_BLOCK
                EdgeLoop_Chroma_vertical(SrcU + (edge << 1), Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, 12);
                EdgeLoop_Chroma_vertical(SrcV + (edge << 1), Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, 12);
#else
                EdgeLoop_Chroma_vertical(SrcU + (edge << 1), Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, pitch >> 1);
                EdgeLoop_Chroma_vertical(SrcV + (edge << 1), Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, pitch >> 1);
#endif
            }
        }
    } //end edge
    /* 3.HORIZONTAL EDGE + MB BOUNDARY (edge = 0) */
    if (filterTopMbEdgeFlag)
    {
        MbP = MbQ - video->PicWidthInMbs;
        //GetStrength(video, Strength, MbP, MbQ, 1, 0); // Strength for 4 blks in 1 stripe, 0 => vertical edge
        GetStrength_Edge0(Strength, MbP, MbQ, 1);
        str = (void*)Strength;  //de-ref type-punned pointer fix
        if (*((uint32*)str))    // only if one of the 4 Strength bytes is != 0
        {
            QP = (MbP->QPy + MbQ->QPy + 1) >> 1; // Average QP of the two blocks;
            indexA = QP + video->FilterOffsetA;
            indexB = QP + video->FilterOffsetB;
            indexA = qp_clip_tab[indexA]; // IClip(0, MAX_QP, QP+video->FilterOffsetA)
            indexB = qp_clip_tab[indexB]; // IClip(0, MAX_QP, QP+video->FilterOffsetB)
            Alpha = ALPHA_TABLE[indexA];
            Beta = BETA_TABLE[indexB];
            clipTable = (int *)CLIP_TAB[indexA];
            if (Alpha > 0 && Beta > 0)
            {
#ifdef USE_PRED_BLOCK
                EdgeLoop_Luma_horizontal(SrcY, Strength, Alpha, Beta, clipTable, 20);
#else
                EdgeLoop_Luma_horizontal(SrcY, Strength, Alpha, Beta, clipTable, pitch);
#endif
            }
            QPC = (MbP->QPc + MbQ->QPc + 1) >> 1;
            indexA = QPC + video->FilterOffsetA;
            indexB = QPC + video->FilterOffsetB;
            indexA = qp_clip_tab[indexA]; // IClip(0, MAX_QP, QP+video->FilterOffsetA)
            indexB = qp_clip_tab[indexB]; // IClip(0, MAX_QP, QP+video->FilterOffsetB)
            Alpha = ALPHA_TABLE[indexA];
            Beta = BETA_TABLE[indexB];
            clipTable = (int *)CLIP_TAB[indexA];
            if (Alpha > 0 && Beta > 0)
            {
#ifdef USE_PRED_BLOCK
                EdgeLoop_Chroma_horizontal(SrcU, Strength, Alpha, Beta, clipTable, 12);
                EdgeLoop_Chroma_horizontal(SrcV, Strength, Alpha, Beta, clipTable, 12);
#else
                EdgeLoop_Chroma_horizontal(SrcU, Strength, Alpha, Beta, clipTable, pitch >> 1);
                EdgeLoop_Chroma_horizontal(SrcV, Strength, Alpha, Beta, clipTable, pitch >> 1);
#endif
            }
        }
    } /* end of: if(filterTopMbEdgeFlag) */
    /* 4.HORIZONTAL EDGE (no boundary), the edges are inside a MB */
    MbP = MbQ;
    /* Recover the luma Alpha, Beta and clipTable computed before step 2
       (edge-0 horizontal filtering above clobbered the working copies).
       Note that Alpha_c, Beta_c and clipTable_c for chroma is already calculated */
    Alpha = Alpha_y;
    Beta = Beta_y;
    clipTable = clipTable_y;
    GetStrength_HorizontalEdges(Strength + 4, MbQ); // Strength for 4 blks in 1 stripe, 0 => vertical edge
    for (edge = 1; edge < 4; edge++) // 4 horizontal strips of 16 pel
    {
        //GetStrength(video, Strength, MbP, MbQ, 1, edge); // Strength for 4 blks in 1 stripe 1 => horizontal edge
        if (*((int*)(Strength + (edge << 2)))) // only if one of the 4 Strength bytes is != 0
        {
            if (Alpha > 0 && Beta > 0)
            {
#ifdef USE_PRED_BLOCK
                EdgeLoop_Luma_horizontal(SrcY + (edge << 2)*20, Strength + (edge << 2), Alpha, Beta, clipTable, 20);
#else
                EdgeLoop_Luma_horizontal(SrcY + (edge << 2)*pitch, Strength + (edge << 2), Alpha, Beta, clipTable, pitch);
#endif
            }
            if (!(edge & 1) && Alpha_c > 0 && Beta_c > 0)
            {
#ifdef USE_PRED_BLOCK
                EdgeLoop_Chroma_horizontal(SrcU + (edge << 1)*12, Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, 12);
                EdgeLoop_Chroma_horizontal(SrcV + (edge << 1)*12, Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, 12);
#else
                EdgeLoop_Chroma_horizontal(SrcU + (edge << 1)*(pitch >> 1), Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, pitch >> 1);
                EdgeLoop_Chroma_horizontal(SrcV + (edge << 1)*(pitch >> 1), Strength + (edge << 2), Alpha_c, Beta_c, clipTable_c, pitch >> 1);
#endif
            }
        }
    } //end edge
    return;
}
/*
*****************************************************************************************************
* \brief returns a buffer of 4 Strength values for one stripe in a mb (for different Frame types)
*****************************************************************************************************
*/
/*
 * Compute the four boundary-strength (bS) values for macroblock-boundary
 * edge 0 between the neighbor MbP and the current MbQ.
 *   Strength : output, 4 bytes, one value per 4x4 block along the edge.
 *   dir      : 0 = vertical MB edge (MbP is the left neighbor),
 *              otherwise horizontal MB edge (MbP is the top neighbor).
 * Decision order (cf. H.264 deblocking, boundary-strength derivation):
 *   1. either MB intra-coded          -> ININT_STRENGTH[0] for all 4 blocks
 *   2. reference index differs        -> bS = 1
 *   3. non-zero coeffs on either side -> bS = 2 (overrides step 2)
 *   4. |mv component delta| >= 4 quarter-pels -> bS = 1 (only when bS == 0)
 */
void GetStrength_Edge0(uint8 *Strength, AVCMacroblock* MbP, AVCMacroblock* MbQ, int dir)
{
int tmp;
int16 *ptrQ, *ptrP;
void* vptr;
uint8 *pStrength;
void* refIdx;
if (MbP->mbMode == AVC_I4 || MbP->mbMode == AVC_I16 ||
MbQ->mbMode == AVC_I4 || MbQ->mbMode == AVC_I16)
{
/* Intra on either side of a MB edge forces the strongest filtering. */
*((int*)Strength) = ININT_STRENGTH[0]; // Start with Strength=3. or Strength=4 for Mb-edge
}
else // if not intra or SP-frame
{
*((int*)Strength) = 0;
if (dir == 0) // Vertical Edge 0
{
//1. Check the ref_frame_id
refIdx = (void*) MbQ->RefIdx; //de-ref type-punned pointer fix
ptrQ = (int16*)refIdx;
refIdx = (void*)MbP->RefIdx; //de-ref type-punned pointer fix
ptrP = (int16*)refIdx;
pStrength = Strength;
/* RefIdx is read as int16 per 8x8 partition: MbQ's left partitions
   (0, 2) against MbP's right partitions (1, 3). */
if (ptrQ[0] != ptrP[1]) pStrength[0] = 1;
if (ptrQ[2] != ptrP[3]) pStrength[2] = 1;
pStrength[1] = pStrength[0];
pStrength[3] = pStrength[2];
//2. Check the non-zero coeff blocks (4x4)
/* MbQ's left 4x4 column (0,4,8,12) vs MbP's right column (3,7,11,15). */
if (MbQ->nz_coeff[0] != 0 || MbP->nz_coeff[3] != 0) pStrength[0] = 2;
if (MbQ->nz_coeff[4] != 0 || MbP->nz_coeff[7] != 0) pStrength[1] = 2;
if (MbQ->nz_coeff[8] != 0 || MbP->nz_coeff[11] != 0) pStrength[2] = 2;
if (MbQ->nz_coeff[12] != 0 || MbP->nz_coeff[15] != 0) pStrength[3] = 2;
//3. Only need to check the mv difference
vptr = (void*)MbQ->mvL0; // for deref type-punned pointer
ptrQ = (int16*)vptr;
ptrP = (int16*)(MbP->mvL0 + 3); // points to 4x4 block #3 (the 4th column)
/* Each mvL0 entry packs (x, y) as two int16. The ++/-- pair below
   compares x then y and restores the pointer; "+= 8" then advances
   one 4x4-block row (4 packed mv pairs). */
// 1st blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pStrength++;
ptrQ += 8;
ptrP += 8;
// 2nd blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pStrength++;
ptrQ += 8;
ptrP += 8;
// 3rd blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pStrength++;
ptrQ += 8;
ptrP += 8;
// 4th blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
}
else // Horizontal Edge 0
{
//1. Check the ref_frame_id
refIdx = (void*)MbQ->RefIdx; //de-ref type-punned pointer
ptrQ = (int16*)refIdx;
refIdx = (void*)MbP->RefIdx; //de-ref type-punned pointer
ptrP = (int16*)refIdx;
pStrength = Strength;
/* MbQ's top partitions (0, 1) against MbP's bottom partitions (2, 3). */
if (ptrQ[0] != ptrP[2]) pStrength[0] = 1;
if (ptrQ[1] != ptrP[3]) pStrength[2] = 1;
pStrength[1] = pStrength[0];
pStrength[3] = pStrength[2];
//2. Check the non-zero coeff blocks (4x4)
/* MbQ's top 4x4 row (0..3) vs MbP's bottom row (12..15). */
if (MbQ->nz_coeff[0] != 0 || MbP->nz_coeff[12] != 0) pStrength[0] = 2;
if (MbQ->nz_coeff[1] != 0 || MbP->nz_coeff[13] != 0) pStrength[1] = 2;
if (MbQ->nz_coeff[2] != 0 || MbP->nz_coeff[14] != 0) pStrength[2] = 2;
if (MbQ->nz_coeff[3] != 0 || MbP->nz_coeff[15] != 0) pStrength[3] = 2;
//3. Only need to check the mv difference
vptr = (void*)MbQ->mvL0;
ptrQ = (int16*)vptr;
ptrP = (int16*)(MbP->mvL0 + 12); // points to 4x4 block #12 (the 4th row)
/* Here "+= 2" steps one 4x4 block to the right (one packed mv pair). */
// 1st blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pStrength++;
ptrQ += 2;
ptrP += 2;
// 2nd blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pStrength++;
ptrQ += 2;
ptrP += 2;
// 3rd blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pStrength++;
ptrQ += 2;
ptrP += 2;
// 4th blk
if (*pStrength == 0)
{
// check |mv difference| >= 4
tmp = *ptrQ++ - *ptrP++;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *ptrQ-- - *ptrP--;
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
} /* end of: else if(dir == 0) */
} /* end of: if( !(MbP->mbMode == AVC_I4 ...) */
}
/*
 * Compute boundary strengths for the three internal vertical edges of MbQ.
 * Strength is a 12-byte array laid out edge-major:
 *   Strength[0..3] = edge 1, [4..7] = edge 2, [8..11] = edge 3,
 * one byte per 4x4 row. Intra MBs get ININT_STRENGTH for every edge.
 * Otherwise: bS = 2 where either adjacent 4x4 block has coefficients,
 * else bS = 1 where the reference index differs (only possible at edge 2,
 * the 8x8 partition boundary) or an mv component differs by >= 4
 * quarter-pels. The row loop is unrolled two rows per iteration.
 */
void GetStrength_VerticalEdges(uint8 *Strength, AVCMacroblock* MbQ)
{
int idx, tmp;
int16 *ptr, *pmvx, *pmvy;
uint8 *pnz;
uint8 *pStrength, *pStr;
void* refIdx;
if (MbQ->mbMode == AVC_I4 || MbQ->mbMode == AVC_I16)
{
*((int*)Strength) = ININT_STRENGTH[1]; // Start with Strength=3. or Strength=4 for Mb-edge
*((int*)(Strength + 4)) = ININT_STRENGTH[2];
*((int*)(Strength + 8)) = ININT_STRENGTH[3];
}
else // Not intra or SP-frame
{
*((int*)Strength) = 0; // for non-intra MB, strength = 0, 1 or 2.
*((int*)(Strength + 4)) = 0;
*((int*)(Strength + 8)) = 0;
//1. Check the ref_frame_id
/* Only edge 2 separates 8x8 partitions, so only Strength[4..7] can get
   bS = 1 from a reference-index mismatch. */
refIdx = (void*)MbQ->RefIdx; //de-ref type-punned pointer fix
ptr = (int16*)refIdx;
pStrength = Strength;
if (ptr[0] != ptr[1]) pStrength[4] = 1;
if (ptr[2] != ptr[3]) pStrength[6] = 1;
pStrength[5] = pStrength[4];
pStrength[7] = pStrength[6];
//2. Check the nz_coeff block and mv difference
/* mvL0 entries pack (x, y) as two int16; pmvx/pmvy track the right-hand
   block of each edge, and *(pmv - 2) is the block to its left. */
pmvx = (int16*)(MbQ->mvL0 + 1); // points to 4x4 block #1,not #0
pmvy = pmvx + 1;
for (idx = 0; idx < 4; idx += 2) // unroll the loop, make 4 iterations to 2
{
// first/third row : 1,2,3 or 9,10,12
// Strength = 2 for a whole row
/* A non-zero 4x4 block raises both edges that touch it to bS = 2. */
pnz = MbQ->nz_coeff + (idx << 2);
if (*pnz++ != 0) *pStrength = 2;
if (*pnz++ != 0)
{
*pStrength = 2;
*(pStrength + 4) = 2;
}
if (*pnz++ != 0)
{
*(pStrength + 4) = 2;
*(pStrength + 8) = 2;
}
if (*pnz != 0) *(pStrength + 8) = 2;
// Then Strength = 1
if (*pStrength == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *pmvy - *(pmvy - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pmvx += 2;
pmvy += 2;
pStr = pStrength + 4;
if (*pStr == 0)
{
//check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
pmvx += 2;
pmvy += 2;
pStr = pStrength + 8;
if (*pStr == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
// Second/fourth row: 5,6,7 or 14,15,16
// Strength = 2 for a whole row
pnz = MbQ->nz_coeff + ((idx + 1) << 2);
if (*pnz++ != 0) *(pStrength + 1) = 2;
if (*pnz++ != 0)
{
*(pStrength + 1) = 2;
*(pStrength + 5) = 2;
}
if (*pnz++ != 0)
{
*(pStrength + 5) = 2;
*(pStrength + 9) = 2;
}
if (*pnz != 0) *(pStrength + 9) = 2;
// Then Strength = 1
/* +4 skips from block #3 of one row to block #1 of the next row. */
pmvx += 4;
pmvy += 4;
pStr = pStrength + 1;
if (*pStr == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
pmvx += 2;
pmvy += 2;
pStr = pStrength + 5;
if (*pStr == 0)
{
//check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
pmvx += 2;
pmvy += 2;
pStr = pStrength + 9;
if (*pStr == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 2);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
// update some variables for the next two rows
pmvx += 4;
pmvy += 4;
pStrength += 2;
} /* end of: for(idx=0; idx<2; idx++) */
} /* end of: else if( MbQ->mbMode == AVC_I4 ...) */
}
/*
 * Compute boundary strengths for the three internal horizontal edges of MbQ.
 * Strength is laid out edge-major like the vertical variant:
 *   Strength[0..3] = edge 1, [4..7] = edge 2, [8..11] = edge 3,
 * one byte per 4x4 column. Intra MBs get ININT_STRENGTH for every edge.
 * Otherwise: bS = 2 where either adjacent 4x4 block has coefficients,
 * else bS = 1 where the reference index differs (edge 2 only, the 8x8
 * partition boundary) or an mv component differs by >= 4 quarter-pels.
 * The column loop is unrolled two columns per iteration.
 */
void GetStrength_HorizontalEdges(uint8 Strength[12], AVCMacroblock* MbQ)
{
int idx, tmp;
int16 *ptr, *pmvx, *pmvy;
uint8 *pStrength, *pStr;
void* refIdx;
if (MbQ->mbMode == AVC_I4 || MbQ->mbMode == AVC_I16)
{
*((int*)Strength) = ININT_STRENGTH[1]; // Start with Strength=3. or Strength=4 for Mb-edge
*((int*)(Strength + 4)) = ININT_STRENGTH[2];
*((int*)(Strength + 8)) = ININT_STRENGTH[3];
}
else // Not intra or SP-frame
{
*((int*)Strength) = 0; // for non-intra MB, strength = 0, 1 or 2.
*((int*)(Strength + 4)) = 0; // for non-intra MB, strength = 0, 1 or 2.
*((int*)(Strength + 8)) = 0; // for non-intra MB, strength = 0, 1 or 2.
//1. Check the ref_frame_id
/* Only edge 2 separates 8x8 partitions, so only Strength[4..7] can get
   bS = 1 from a reference-index mismatch (top vs bottom partitions). */
refIdx = (void*) MbQ->RefIdx; // de-ref type-punned fix
ptr = (int16*) refIdx;
pStrength = Strength;
if (ptr[0] != ptr[2]) pStrength[4] = 1;
if (ptr[1] != ptr[3]) pStrength[6] = 1;
pStrength[5] = pStrength[4];
pStrength[7] = pStrength[6];
//2. Check the nz_coeff block and mv difference
/* mvL0 entries pack (x, y) as two int16; pmvx/pmvy track the block below
   each edge, and *(pmv - 8) is the block one row above it. */
pmvx = (int16*)(MbQ->mvL0 + 4); // points to 4x4 block #4,not #0
pmvy = pmvx + 1;
for (idx = 0; idx < 4; idx += 2) // unroll the loop, make 4 iterations to 2
{
// first/third row : 1,2,3 or 9,10,12
// Strength = 2 for a whole row
/* A non-zero 4x4 block raises both edges that touch it to bS = 2. */
if (MbQ->nz_coeff[idx] != 0) *pStrength = 2;
if (MbQ->nz_coeff[4+idx] != 0)
{
*pStrength = 2;
*(pStrength + 4) = 2;
}
if (MbQ->nz_coeff[8+idx] != 0)
{
*(pStrength + 4) = 2;
*(pStrength + 8) = 2;
}
if (MbQ->nz_coeff[12+idx] != 0) *(pStrength + 8) = 2;
// Then Strength = 1
if (*pStrength == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
tmp = *pmvy - *(pmvy - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStrength = 1;
}
pmvx += 8;
pmvy += 8;
pStr = pStrength + 4;
if (*pStr == 0)
{
//check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
pmvx += 8;
pmvy += 8;
pStr = pStrength + 8;
if (*pStr == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
// Second/fourth row: 5,6,7 or 14,15,16
// Strength = 2 for a whole row
if (MbQ->nz_coeff[idx+1] != 0) *(pStrength + 1) = 2;
if (MbQ->nz_coeff[4+idx+1] != 0)
{
*(pStrength + 1) = 2;
*(pStrength + 5) = 2;
}
if (MbQ->nz_coeff[8+idx+1] != 0)
{
*(pStrength + 5) = 2;
*(pStrength + 9) = 2;
}
if (MbQ->nz_coeff[12+idx+1] != 0) *(pStrength + 9) = 2;
// Then Strength = 1
/* Rewind from the bottom row back to the top, one column to the right. */
pmvx -= 14;
pmvy -= 14; // -14 = -16 + 2
pStr = pStrength + 1;
if (*pStr == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
pmvx += 8;
pmvy += 8;
pStr = pStrength + 5;
if (*pStr == 0)
{
//check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
pmvx += 8;
pmvy += 8;
pStr = pStrength + 9;
if (*pStr == 0)
{
//within the same 8x8 block, no need to check the reference id
//only need to check the |mv difference| >= 4
tmp = *pmvx - *(pmvx - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
tmp = *pmvy - *(pmvy - 8);
if (tmp < 0) tmp = -tmp;
if (tmp >= 4) *pStr = 1;
}
// update some variables for the next two rows
pmvx -= 14;
pmvy -= 14; // -14 = -16 + 2
pStrength += 2;
} /* end of: for(idx=0; idx<2; idx++) */
} /* end of: else if( MbQ->mbMode == AVC_I4 ...) */
}
/*
*****************************************************************************************
* \brief Filters one edge of 16 (luma) or 8 (chroma) pel
*****************************************************************************************
*/
/*
 * Filter one horizontal luma edge of 16 pixels.
 * SrcPtr points at the first pixel of the row just below the edge; rows above
 * are addressed with negative multiples of pitch. Strength holds one bS value
 * per 4-pixel group. Strength[0] == 4 selects the INTRA strong filter for the
 * whole edge; otherwise each group is filtered according to its own bS.
 *
 * Implementation notes:
 *  - The three threshold tests (|p0-q0| < Alpha, |p0-p1| < Beta,
 *    |q0-q1| < Beta) are folded into one bitwise AND of their (x - limit)
 *    values: the AND is negative iff every operand is negative.
 *  - Clamps use sign-bit masks: ~(x >> 31) is all-ones when x >= 0 and zero
 *    when x < 0, giving branch-light saturation to [-c0, c0] and [0, 255].
 */
void EdgeLoop_Luma_horizontal(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch)
{
int pel, ap = 0, aq = 0, Strng;
int C0, c0, dif, AbsDelta, tmp, tmp1;
int L2 = 0, L1, L0, R0, R1, R2 = 0, RL0;
if (Strength[0] == 4) /* INTRA strong filtering */
{
for (pel = 0; pel < 16; pel++)
{
/* L* = rows above the edge, R* = rows below (R0 is *SrcPtr). */
R0 = SrcPtr[0];
R1 = SrcPtr[pitch];
L0 = SrcPtr[-pitch];
L1 = SrcPtr[-(pitch<<1)];
// |R0 - R1| < Beta
tmp1 = R0 - R1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp = (tmp1 - Beta);
//|L0 - L1| < Beta
tmp1 = L0 - L1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Beta);
//|R0 - L0| < Alpha
AbsDelta = R0 - L0;
if (AbsDelta < 0) AbsDelta = -AbsDelta;
tmp &= (AbsDelta - Alpha);
if (tmp < 0)
{
/* Strong-filter side condition: |R0 - L0| < (Alpha >> 2) + 2. */
AbsDelta -= ((Alpha >> 2) + 2);
R2 = SrcPtr[pitch<<1]; //inc2
L2 = SrcPtr[-(pitch+(pitch<<1))]; // -inc3
// |R0 - R2| < Beta && |R0 - L0| < (Alpha/4 + 2)
tmp = R0 - R2;
if (tmp < 0) tmp = -tmp;
aq = AbsDelta & (tmp - Beta);
// |L0 - L2| < Beta && |R0 - L0| < (Alpha/4 + 2)
tmp = L0 - L2;
if (tmp < 0) tmp = -tmp;
ap = AbsDelta & (tmp - Beta);
if (aq < 0)
{
/* 3-pixel strong filter on the R (below) side. */
tmp = R1 + R0 + L0;
SrcPtr[0] = (L1 + (tmp << 1) + R2 + 4) >> 3;
tmp += R2;
SrcPtr[pitch] = (tmp + 2) >> 2;
SrcPtr[pitch<<1] = (((SrcPtr[(pitch+(pitch<<1))] + R2) << 1) + tmp + 4) >> 3;
}
else
SrcPtr[0] = ((R1 << 1) + R0 + L1 + 2) >> 2;
if (ap < 0)
{
/* 3-pixel strong filter on the L (above) side. */
tmp = L1 + R0 + L0;
SrcPtr[-pitch] = (R1 + (tmp << 1) + L2 + 4) >> 3;
tmp += L2;
SrcPtr[-(pitch<<1)] = (tmp + 2) >> 2;
SrcPtr[-(pitch+(pitch<<1))] = (((SrcPtr[-(pitch<<2)] + L2) << 1) + tmp + 4) >> 3;
}
else
SrcPtr[-pitch] = ((L1 << 1) + L0 + R1 + 2) >> 2;
} /* if(tmp < 0) */
SrcPtr ++; // Increment to next set of pixel
} /* end of: for(pel=0; pel<16; pel++) */
} /* if(Strength[0] == 4) */
else /* Normal filtering */
{
for (pel = 0; pel < 16; pel++)
{
Strng = Strength[pel >> 2];
if (Strng)
{
R0 = SrcPtr[0];
R1 = SrcPtr[pitch];
L0 = SrcPtr[-pitch];
L1 = SrcPtr[-(pitch<<1)]; // inc2
//|R0 - L0| < Alpha
tmp1 = R0 - L0;
if (tmp1 < 0) tmp1 = -tmp1;
tmp = (tmp1 - Alpha);
// |R0 - R1| < Beta
tmp1 = R0 - R1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Beta);
//|L0 - L1| < Beta
tmp1 = L0 - L1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Beta);
if (tmp < 0)
{
R2 = SrcPtr[pitch<<1]; //inc2
L2 = SrcPtr[-(pitch+(pitch<<1))]; // -inc3
// |R0 - R2| < Beta
tmp = R0 - R2;
if (tmp < 0) tmp = -tmp;
aq = tmp - Beta;
// |L0 - L2| < Beta
tmp = L0 - L2;
if (tmp < 0) tmp = -tmp;
ap = tmp - Beta;
/* c0 = C0 plus one per side whose second pixel is also smooth. */
c0 = C0 = clipTable[Strng];
if (ap < 0) c0++;
if (aq < 0) c0++;
//dif = IClip(-c0, c0, ((Delta << 2) + (L1 - R1) + 4) >> 3);
dif = (((R0 - L0) << 2) + (L1 - R1) + 4) >> 3;
tmp = dif + c0;
if ((uint)tmp > (uint)c0 << 1)
{
/* Saturate dif to [-c0, c0] via the sign mask of tmp. */
tmp = ~(tmp >> 31);
dif = (tmp & (c0 << 1)) - c0;
}
//SrcPtr[0] = (uint8)IClip(0, 255, R0 - dif);
//SrcPtr[-inc] = (uint8)IClip(0, 255, L0 + dif);
RL0 = R0 + L0;
R0 -= dif;
L0 += dif;
if ((uint)R0 > 255)
{
tmp = ~(R0 >> 31);
R0 = tmp & 255;
}
if ((uint)L0 > 255)
{
tmp = ~(L0 >> 31);
L0 = tmp & 255;
}
SrcPtr[-pitch] = L0;
SrcPtr[0] = R0;
if (C0 != 0) /* Multiple zeros in the clip tables */
{
if (aq < 0) // SrcPtr[inc] += IClip(-C0, C0,(R2 + ((RL0 + 1) >> 1) - (R1<<1)) >> 1);
{
R2 = (R2 + ((RL0 + 1) >> 1) - (R1 << 1)) >> 1;
tmp = R2 + C0;
if ((uint)tmp > (uint)C0 << 1)
{
tmp = ~(tmp >> 31);
R2 = (tmp & (C0 << 1)) - C0;
}
SrcPtr[pitch] += R2;
}
if (ap < 0) //SrcPtr[-inc2] += IClip(-C0, C0,(L2 + ((RL0 + 1) >> 1) - (L1<<1)) >> 1);
{
L2 = (L2 + ((RL0 + 1) >> 1) - (L1 << 1)) >> 1;
tmp = L2 + C0;
if ((uint)tmp > (uint)C0 << 1)
{
tmp = ~(tmp >> 31);
L2 = (tmp & (C0 << 1)) - C0;
}
SrcPtr[-(pitch<<1)] += L2;
}
}
} /* if(tmp < 0) */
} /* end of: if((Strng = Strength[pel >> 2])) */
SrcPtr ++; // Increment to next set of pixel
} /* for(pel=0; pel<16; pel++) */
} /* else if(Strength[0] == 4) */
}
/*
 * Filter one vertical luma edge of 16 pixels (one row per iteration;
 * SrcPtr advances by pitch each time). The 8 pixels straddling the edge are
 * read as two 32-bit words: R_in = {R3,R2,R1,R0} at SrcPtr and
 * L_in = {L0,L1,L2,L3} at SrcPtr-4, as the byte-extraction below shows
 * (low byte of R_in is R0, high byte of L_in is L0) — i.e. the packing
 * assumes little-endian byte order.
 * NOTE(review): the 32-bit loads/stores at SrcPtr and SrcPtr-4 are not
 * guaranteed 4-byte aligned for arbitrary pitch — confirm the target
 * tolerates unaligned word access.
 * Threshold tests and clamps use the same sign-bit-mask folding as
 * EdgeLoop_Luma_horizontal.
 */
void EdgeLoop_Luma_vertical(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch)
{
int pel, ap = 1, aq = 1;
int C0, c0, dif, AbsDelta, Strng, tmp, tmp1;
int L2 = 0, L1, L0, R0, R1, R2 = 0;
uint8 *ptr, *ptr1;
register uint R_in, L_in;
uint R_out, L_out;
if (Strength[0] == 4) /* INTRA strong filtering */
{
for (pel = 0; pel < 16; pel++)
{
// Read 8 pels
R_in = *((uint *)SrcPtr); // R_in = {R3, R2, R1, R0}
L_in = *((uint *)(SrcPtr - 4)); // L_in = {L0, L1, L2, L3}
R1 = (R_in >> 8) & 0xff;
R0 = R_in & 0xff;
L0 = L_in >> 24;
L1 = (L_in >> 16) & 0xff;
// |R0 - R1| < Beta
tmp1 = (R_in & 0xff) - R1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp = (tmp1 - Beta);
//|L0 - L1| < Beta
tmp1 = (L_in >> 24) - L1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Beta);
//|R0 - L0| < Alpha
AbsDelta = (R_in & 0xff) - (L_in >> 24);
if (AbsDelta < 0) AbsDelta = -AbsDelta;
tmp &= (AbsDelta - Alpha);
if (tmp < 0)
{
/* Strong-filter side condition: |R0 - L0| < (Alpha >> 2) + 2. */
AbsDelta -= ((Alpha >> 2) + 2);
R2 = (R_in >> 16) & 0xff;
L2 = (L_in >> 8) & 0xff;
// |R0 - R2| < Beta && |R0 - L0| < (Alpha/4 + 2)
tmp1 = (R_in & 0xff) - R2;
if (tmp1 < 0) tmp1 = -tmp1;
aq = AbsDelta & (tmp1 - Beta);
// |L0 - L2| < Beta && |R0 - L0| < (Alpha/4 + 2)
tmp1 = (L_in >> 24) - L2;
if (tmp1 < 0) tmp1 = -tmp1;
ap = AbsDelta & (tmp1 - Beta);
ptr = SrcPtr;
if (aq < 0)
{
/* Rebuild the whole right-side word with the 3 filtered pixels. */
R_out = (R_in >> 24) << 24; // Keep R3 at the fourth byte
tmp = R0 + L0 + R1;
R_out |= (((tmp << 1) + L1 + R2 + 4) >> 3);
tmp += R2;
R_out |= (((tmp + 2) >> 2) << 8);
tmp1 = ((R_in >> 24) + R2) << 1;
R_out |= (((tmp1 + tmp + 4) >> 3) << 16);
*((uint *)SrcPtr) = R_out;
}
else
*ptr = ((R1 << 1) + R0 + L1 + 2) >> 2;
if (ap < 0)
{
/* Rebuild the whole left-side word with the 3 filtered pixels. */
L_out = (L_in << 24) >> 24; // Keep L3 at the first byte
tmp = R0 + L0 + L1;
L_out |= ((((tmp << 1) + R1 + L2 + 4) >> 3) << 24);
tmp += L2;
L_out |= (((tmp + 2) >> 2) << 16);
tmp1 = ((L_in & 0xff) + L2) << 1;
L_out |= (((tmp1 + tmp + 4) >> 3) << 8);
*((uint *)(SrcPtr - 4)) = L_out;
}
else
*(--ptr) = ((L1 << 1) + L0 + R1 + 2) >> 2;
} /* if(tmp < 0) */
SrcPtr += pitch; // Increment to next set of pixel
} /* end of: for(pel=0; pel<16; pel++) */
} /* if(Strength[0] == 4) */
else /* Normal filtering */
{
for (pel = 0; pel < 16; pel++)
{
Strng = Strength[pel >> 2];
if (Strng)
{
// Read 8 pels
R_in = *((uint *)SrcPtr); // R_in = {R3, R2, R1, R0}
L_in = *((uint *)(SrcPtr - 4)); // L_in = {L0, L1, L2, L3}
R1 = (R_in >> 8) & 0xff;
R0 = R_in & 0xff;
L0 = L_in >> 24;
L1 = (L_in >> 16) & 0xff;
//|R0 - L0| < Alpha
tmp = R0 - L0;
if (tmp < 0) tmp = -tmp;
tmp -= Alpha;
// |R0 - R1| < Beta
tmp1 = R0 - R1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Beta);
//|L0 - L1| < Beta
tmp1 = L0 - L1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Beta);
if (tmp < 0)
{
L2 = SrcPtr[-3];
R2 = SrcPtr[2];
// |R0 - R2| < Beta
tmp = R0 - R2;
if (tmp < 0) tmp = -tmp;
aq = tmp - Beta;
// |L0 - L2| < Beta
tmp = L0 - L2;
if (tmp < 0) tmp = -tmp;
ap = tmp - Beta;
/* c0 = C0 plus one per side whose second pixel is also smooth. */
c0 = C0 = clipTable[Strng];
if (ap < 0) c0++;
if (aq < 0) c0++;
//dif = IClip(-c0, c0, ((Delta << 2) + (L1 - R1) + 4) >> 3);
dif = (((R0 - L0) << 2) + (L1 - R1) + 4) >> 3;
tmp = dif + c0;
if ((uint)tmp > (uint)c0 << 1)
{
/* Saturate dif to [-c0, c0] via the sign mask of tmp. */
tmp = ~(tmp >> 31);
dif = (tmp & (c0 << 1)) - c0;
}
ptr = SrcPtr;
ptr1 = SrcPtr - 1;
//SrcPtr[0] = (uint8)IClip(0, 255, R0 - dif);
//SrcPtr[-inc] = (uint8)IClip(0, 255, L0 + dif);
R_in = R0 - dif;
L_in = L0 + dif; /* cannot re-use R0 and L0 here */
if ((uint)R_in > 255)
{
tmp = ~((int)R_in >> 31);
R_in = tmp & 255;
}
if ((uint)L_in > 255)
{
tmp = ~((int)L_in >> 31);
L_in = tmp & 255;
}
*ptr1-- = L_in;
*ptr++ = R_in;
if (C0 != 0) // Multiple zeros in the clip tables
{
if (ap < 0) //SrcPtr[-inc2] += IClip(-C0, C0,(L2 + ((RL0 + 1) >> 1) - (L1<<1)) >> 1);
{
L2 = (L2 + ((R0 + L0 + 1) >> 1) - (L1 << 1)) >> 1;
tmp = L2 + C0;
if ((uint)tmp > (uint)C0 << 1)
{
tmp = ~(tmp >> 31);
L2 = (tmp & (C0 << 1)) - C0;
}
*ptr1 += L2;
}
if (aq < 0) // SrcPtr[inc] += IClip(-C0, C0,(R2 + ((RL0 + 1) >> 1) - (R1<<1)) >> 1);
{
R2 = (R2 + ((R0 + L0 + 1) >> 1) - (R1 << 1)) >> 1;
tmp = R2 + C0;
if ((uint)tmp > (uint)C0 << 1)
{
tmp = ~(tmp >> 31);
R2 = (tmp & (C0 << 1)) - C0;
}
*ptr += R2;
}
}
} /* if(tmp < 0) */
} /* end of: if((Strng = Strength[pel >> 2])) */
SrcPtr += pitch; // Increment to next set of pixel
} /* for(pel=0; pel<16; pel++) */
} /* else if(Strength[0] == 4) */
}
/*
 * Filter one vertical chroma edge (8 pixels). The pel counter advances by 2
 * per filtered pixel (4 per skipped pair), so pel>>2 indexes one Strength
 * value per pair of chroma rows; SrcPtr steps down by pitch per pixel.
 * The 8 pixels straddling the edge are read as two 32-bit words with the
 * same little-endian packing as EdgeLoop_Luma_vertical (low byte of R_in is
 * R0, high byte of L_in is L0); only the two boundary pixels are written,
 * as bytes.
 * NOTE(review): the 32-bit loads at SrcPtr and SrcPtr-4 may be unaligned —
 * confirm the target tolerates unaligned word access.
 */
void EdgeLoop_Chroma_vertical(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch)
{
int pel, Strng;
int c0, dif;
int L1, L0, R0, R1, tmp, tmp1;
uint8 *ptr;
uint R_in, L_in;
for (pel = 0; pel < 16; pel++)
{
Strng = Strength[pel>>2];
if (Strng)
{
// Read 8 pels
R_in = *((uint *)SrcPtr); // R_in = {R3, R2, R1, R0}
L_in = *((uint *)(SrcPtr - 4)); // L_in = {L0, L1, L2, L3}
R1 = (R_in >> 8) & 0xff;
R0 = R_in & 0xff;
L0 = L_in >> 24;
L1 = (L_in >> 16) & 0xff;
/* Three threshold tests folded into one sign-bit AND (see luma code). */
// |R0 - R1| < Beta
tmp1 = R0 - R1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp = (tmp1 - Beta);
//|L0 - L1| < Beta
tmp1 = L0 - L1;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Beta);
//|R0 - L0| < Alpha
tmp1 = R0 - L0;
if (tmp1 < 0) tmp1 = -tmp1;
tmp &= (tmp1 - Alpha);
if (tmp < 0)
{
ptr = SrcPtr;
if (Strng == 4) /* INTRA strong filtering */
{
/* Chroma strong filter touches only the boundary pixels. */
*ptr-- = ((R1 << 1) + R0 + L1 + 2) >> 2;
*ptr = ((L1 << 1) + L0 + R1 + 2) >> 2;
}
else /* normal filtering */
{
/* Chroma always uses clipTable[bS] + 1 as the clip bound. */
c0 = clipTable[Strng] + 1;
//dif = IClip(-c0, c0, ((Delta << 2) + (L1 - R1) + 4) >> 3);
dif = (((R0 - L0) << 2) + (L1 - R1) + 4) >> 3;
tmp = dif + c0;
if ((uint)tmp > (uint)c0 << 1)
{
/* Saturate dif to [-c0, c0] via the sign mask of tmp. */
tmp = ~(tmp >> 31);
dif = (tmp & (c0 << 1)) - c0;
}
//SrcPtr[0] = (uint8)IClip(0, 255, R0 - dif);
//SrcPtr[-inc] = (uint8)IClip(0, 255, L0 + dif);
L0 += dif;
R0 -= dif;
if ((uint)L0 > 255)
{
tmp = ~(L0 >> 31);
L0 = tmp & 255;
}
if ((uint)R0 > 255)
{
tmp = ~(R0 >> 31);
R0 = tmp & 255;
}
*ptr-- = R0;
*ptr = L0;
}
}
pel ++;
SrcPtr += pitch; // Increment to next set of pixel
} /* end of: if((Strng = Strength[pel >> 2])) */
else
{
/* Whole 2-pixel group unfiltered: skip it. */
pel += 3;
SrcPtr += (pitch << 1); //PtrInc << 1;
}
} /* end of: for(pel=0; pel<16; pel++) */
}
/*
 * Deblock one horizontal chroma edge (8 pixels). SrcPtr addresses the first
 * pixel of the row just below the edge; the row above is at -pitch.
 * The pel counter advances by 2 per filtered pixel (4 per skipped pair), so
 * pos>>2 indexes one Strength value per pair of chroma pixels, mirroring the
 * stepping of the other chroma/luma edge loops.
 */
void EdgeLoop_Chroma_horizontal(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta, int *clipTable, int pitch)
{
    int pos, bS;
    int up1, up0, dn0, dn1;
    int c0, delta, ad;

    for (pos = 0; pos < 16; pos++)
    {
        bS = Strength[pos >> 2];
        if (!bS)
        {
            /* Whole 2-pixel group is unfiltered: skip it. */
            pos += 3;
            SrcPtr += 2;
            continue;
        }
        dn0 = SrcPtr[0];
        up0 = SrcPtr[-pitch];
        up1 = SrcPtr[-(pitch << 1)];
        dn1 = SrcPtr[pitch];
        /* The edge is filtered only when both side gradients are below Beta
           and the cross-edge step is below Alpha. */
        ad = dn0 - dn1;
        if (ad < 0) ad = -ad;
        if (ad < Beta)
        {
            ad = up0 - up1;
            if (ad < 0) ad = -ad;
            if (ad < Beta)
            {
                ad = dn0 - up0;
                if (ad < 0) ad = -ad;
                if (ad < Alpha)
                {
                    if (bS == 4)
                    {
                        /* INTRA strong filtering: fixed 3-tap smoothing of the
                           two boundary pixels. */
                        SrcPtr[0] = ((dn1 << 1) + dn0 + up1 + 2) >> 2;
                        SrcPtr[-pitch] = ((up1 << 1) + up0 + dn1 + 2) >> 2;
                    }
                    else
                    {
                        /* Normal filtering: delta clipped to [-c0, c0], then
                           the adjusted samples clamped to 0..255. */
                        c0 = clipTable[bS] + 1;
                        delta = (((dn0 - up0) << 2) + (up1 - dn1) + 4) >> 3;
                        if (delta > c0) delta = c0;
                        else if (delta < -c0) delta = -c0;
                        up0 += delta;
                        dn0 -= delta;
                        if (up0 > 255) up0 = 255;
                        else if (up0 < 0) up0 = 0;
                        if (dn0 > 255) dn0 = 255;
                        else if (dn0 < 0) dn0 = 0;
                        SrcPtr[0] = dn0;
                        SrcPtr[-pitch] = up0;
                    }
                }
            }
        }
        pos++;
        SrcPtr++;
    }
}
================================================
FILE: RtspCamera/jni/avc_h264/common/src/dpb.cpp
================================================
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avclib_common.h"
// xxx pa
#define LOG_TAG "dbp"
#include "android/log.h"
#define DPB_MEM_ATTR 0
/*
 * Allocate and initialize the decoded picture buffer (DPB).
 *
 * The number of frame stores is derived from the level limit (MaxDPBX2),
 * clamped to MAX_FS, and never smaller than num_ref_frames + 1 (one extra
 * store for the picture currently being decoded). When `padding` is true,
 * each frame is sized with a one-macroblock ring on every side.
 *
 * Returns AVC_SUCCESS, or AVC_MEMORY_FAIL if any allocation fails (or, in
 * the non-pool build, if the picture buffer is not 4-byte aligned).
 */
AVCStatus InitDPB(AVCHandle *avcHandle, AVCCommonObj *video, int FrameHeightInMbs, int PicWidthInMbs, bool padding)
{
    __android_log_print(ANDROID_LOG_INFO, LOG_TAG, "InitDPB(int FrameHeightInMbs <%d>, int PicWidthInMbs <%d>, bool padding <%d>)", FrameHeightInMbs, PicWidthInMbs, padding);
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int level, framesize, num_fs;
    void *userData = avcHandle->userData;
    uint16 refIdx = 0;

    level = video->currSeqParams->level_idc;

    /* Clear every slot first so CleanUpDPB is safe even on partial failure. */
    for (num_fs = 0; num_fs < MAX_FS; num_fs++)
    {
        dpb->fs[num_fs] = NULL;
    }

    /* 4:2:0 frame: (MBs << 7) luma+chroma units * 3 = bytes per frame. */
    framesize = (int)(((FrameHeightInMbs * PicWidthInMbs) << 7) * 3);
    if (padding)
    {
        /* One extra macroblock ring around the frame. */
        video->padded_size = (int)((((FrameHeightInMbs + 2) * (PicWidthInMbs + 2)) << 7) * 3) - framesize;
    }
    else
    {
        video->padded_size = 0;
    }

#ifndef PV_MEMORY_POOL
    if (dpb->decoded_picture_buffer)
    {
        avcHandle->CBAVC_Free(userData, (int)dpb->decoded_picture_buffer);
        dpb->decoded_picture_buffer = NULL;
    }
#endif
    /* need to allocate one extra frame for current frame, DPB only defines for reference frames */
    dpb->num_fs = (uint32)(MaxDPBX2[mapLev2Idx[level]] << 2) / (3 * FrameHeightInMbs * PicWidthInMbs) + 1;
    __android_log_print(ANDROID_LOG_INFO, LOG_TAG, "InitDPB dpb->num_fs = %d", dpb->num_fs);
    if (dpb->num_fs > MAX_FS)
    {
        dpb->num_fs = MAX_FS;
    }
    /* Never size below num_ref_frames + 1 (the current picture needs a slot). */
    if (video->currSeqParams->num_ref_frames + 1 > (uint32)dpb->num_fs)
    {
        dpb->num_fs = video->currSeqParams->num_ref_frames + 1;
    }

    dpb->dpb_size = dpb->num_fs * (framesize + video->padded_size);

#ifndef PV_MEMORY_POOL
    dpb->decoded_picture_buffer = (uint8*) avcHandle->CBAVC_Malloc(userData, dpb->dpb_size, 100/*DPB_MEM_ATTR*/);
    /* BUGFIX: the alignment test was `dpb->decoded_picture_buffer & 0x3`,
       which applies `&` to a pointer (ill-formed); cast to an integer before
       masking, as the rest of this file does for pointer arguments. */
    if (dpb->decoded_picture_buffer == NULL || ((uint32)dpb->decoded_picture_buffer & 0x3)) /* not word aligned */
    {
        return AVC_MEMORY_FAIL;
    }
#endif
    dpb->used_size = 0;
    num_fs = 0;

    while (num_fs < dpb->num_fs)
    {
        /* fs is an array of pointers to AVCFrameStore */
        dpb->fs[num_fs] = (AVCFrameStore*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCFrameStore), 101/*DEFAULT_ATTR*/);
        if (dpb->fs[num_fs] == NULL)
        {
            return AVC_MEMORY_FAIL;
        }
#ifndef PV_MEMORY_POOL
        /* assign the actual memory for Sl, Scb, Scr */
        dpb->fs[num_fs]->base_dpb = dpb->decoded_picture_buffer + dpb->used_size;
#endif
        dpb->fs[num_fs]->IsReference = 0;
        dpb->fs[num_fs]->IsLongTerm = 0;
        dpb->fs[num_fs]->IsOutputted = 3; /* 3 = slot free, see DPBInitBuffer */
        dpb->fs[num_fs]->frame.RefIdx = refIdx++; /* this value will remain unchanged through out the encoding session */
        dpb->fs[num_fs]->frame.picType = AVC_FRAME;
        dpb->fs[num_fs]->frame.isLongTerm = 0;
        dpb->fs[num_fs]->frame.isReference = 0;
        video->RefPicList0[num_fs] = &(dpb->fs[num_fs]->frame);
        dpb->fs[num_fs]->frame.padded = 0;
        dpb->used_size += (framesize + video->padded_size);
        num_fs++;
    }

    return AVC_SUCCESS;
}
/*
 * (Re)configure all picture-size-dependent state from the active SPS.
 * If the picture size (PicSizeInMapUnits) and level are unchanged this is a
 * no-op; otherwise all frame stores are flushed and the DPB, the macroblock
 * array, the intra-prediction line buffers (MB_BASED_DEBLOCK builds) and the
 * slice-group map are reallocated.
 * Returns AVC_SUCCESS or AVC_FAIL on any allocation failure.
 */
OSCL_EXPORT_REF AVCStatus AVCConfigureSequence(AVCHandle *avcHandle, AVCCommonObj *video, bool padding)
{
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "AVCConfigureSequence");
void *userData = avcHandle->userData;
AVCDecPicBuffer *dpb = video->decPicBuf;
int framesize, ii; /* size of one frame */
uint PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs, PicSizeInMapUnits;
uint num_fs;
/* derived variables from SPS */
PicWidthInMbs = video->currSeqParams->pic_width_in_mbs_minus1 + 1;
PicHeightInMapUnits = video->currSeqParams->pic_height_in_map_units_minus1 + 1 ;
FrameHeightInMbs = (2 - video->currSeqParams->frame_mbs_only_flag) * PicHeightInMapUnits ;
PicSizeInMapUnits = PicWidthInMbs * PicHeightInMapUnits ;
if (video->PicSizeInMapUnits != PicSizeInMapUnits || video->currSeqParams->level_idc != video->level_idc)
{
/* make sure you mark all the frames as unused for reference for flushing*/
for (ii = 0; ii < dpb->num_fs; ii++)
{
dpb->fs[ii]->IsReference = 0;
dpb->fs[ii]->IsOutputted |= 0x02;
}
/* Frame-store count for the pool callback, from the level limit.
   NOTE(review): clamped with >= here but with > in InitDPB — confirm the
   intended maximum (num_fs is only passed to CBAVC_DPBAlloc here). */
num_fs = (uint32)(MaxDPBX2[(uint32)mapLev2Idx[video->currSeqParams->level_idc]] << 2) / (3 * PicSizeInMapUnits) + 1;
if (num_fs >= MAX_FS)
{
num_fs = MAX_FS;
}
#ifdef PV_MEMORY_POOL
if (padding)
{
avcHandle->CBAVC_DPBAlloc(avcHandle->userData,
PicSizeInMapUnits + ((PicWidthInMbs + 2) << 1) + (PicHeightInMapUnits << 1), num_fs);
}
else
{
avcHandle->CBAVC_DPBAlloc(avcHandle->userData, PicSizeInMapUnits, num_fs);
}
#endif
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "AVCConfigureSequence CleanUpDPB & InitDPB");
CleanUpDPB(avcHandle, video);
if (InitDPB(avcHandle, video, FrameHeightInMbs, PicWidthInMbs, padding) != AVC_SUCCESS)
{
return AVC_FAIL;
}
/* Allocate video->mblock upto PicSizeInMbs and populate the structure such as the neighboring MB pointers. */
/* Despite its name, framesize here counts macroblocks, not bytes. */
framesize = (FrameHeightInMbs * PicWidthInMbs);
if (video->mblock)
{
avcHandle->CBAVC_Free(userData, (uint32)video->mblock);
video->mblock = NULL;
}
video->mblock = (AVCMacroblock*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCMacroblock) * framesize, DEFAULT_ATTR);
if (video->mblock == NULL)
{
return AVC_FAIL;
}
/* -1 marks every MB as not yet belonging to any slice. */
for (ii = 0; ii < framesize; ii++)
{
video->mblock[ii].slice_id = -1;
}
/* Allocate memory for intra prediction */
#ifdef MB_BASED_DEBLOCK
video->intra_pred_top = (uint8*) avcHandle->CBAVC_Malloc(userData, PicWidthInMbs << 4, FAST_MEM_ATTR);
if (video->intra_pred_top == NULL)
{
return AVC_FAIL;
}
video->intra_pred_top_cb = (uint8*) avcHandle->CBAVC_Malloc(userData, PicWidthInMbs << 3, FAST_MEM_ATTR);
if (video->intra_pred_top_cb == NULL)
{
return AVC_FAIL;
}
video->intra_pred_top_cr = (uint8*) avcHandle->CBAVC_Malloc(userData, PicWidthInMbs << 3, FAST_MEM_ATTR);
if (video->intra_pred_top_cr == NULL)
{
return AVC_FAIL;
}
#endif
/* Allocate slice group MAP map */
if (video->MbToSliceGroupMap)
{
avcHandle->CBAVC_Free(userData, (uint32)video->MbToSliceGroupMap);
video->MbToSliceGroupMap = NULL;
}
video->MbToSliceGroupMap = (int*) avcHandle->CBAVC_Malloc(userData, sizeof(uint) * PicSizeInMapUnits * 2, 7/*DEFAULT_ATTR*/);
if (video->MbToSliceGroupMap == NULL)
{
return AVC_FAIL;
}
/* Remember the new configuration so the next call can detect changes. */
video->PicSizeInMapUnits = PicSizeInMapUnits;
video->level_idc = video->currSeqParams->level_idc;
}
return AVC_SUCCESS;
}
/*
 * Release every frame-store structure owned by the decoded picture buffer
 * (and, in non-pool builds, the picture memory itself), then reset the DPB
 * bookkeeping so it can be re-initialized by InitDPB.
 * Always returns AVC_SUCCESS.
 */
OSCL_EXPORT_REF AVCStatus CleanUpDPB(AVCHandle *avcHandle, AVCCommonObj *video)
{
    void *userData = avcHandle->userData;
    AVCDecPicBuffer *dpb = video->decPicBuf;
    int slot;

    /* Walk the full fs[] table (not just num_fs) so a partially built DPB
       is also cleaned up. */
    for (slot = 0; slot < MAX_FS; slot++)
    {
        if (dpb->fs[slot] == NULL)
        {
            continue;
        }
        avcHandle->CBAVC_Free(userData, (int)dpb->fs[slot]);
        dpb->fs[slot] = NULL;
    }
#ifndef PV_MEMORY_POOL
    if (dpb->decoded_picture_buffer)
    {
        avcHandle->CBAVC_Free(userData, (int)dpb->decoded_picture_buffer);
        dpb->decoded_picture_buffer = NULL;
    }
#endif
    dpb->used_size = 0;
    dpb->dpb_size = 0;
    return AVC_SUCCESS;
}
/* Find a free frame store for the next picture to decode and point
   video->currFS at it. A slot is free when it is no longer a reference
   (IsReference == 0) and has fully left the output path (IsOutputted == 3).
   With PV_MEMORY_POOL, the slot's pixel buffer is bound via CBAVC_FrameBind.
   Returns AVC_SUCCESS, AVC_NO_BUFFER if binding fails, or
   AVC_PICTURE_OUTPUT_READY when every slot is still occupied (caller must
   drain output first). */
OSCL_EXPORT_REF AVCStatus DPBInitBuffer(AVCHandle *avcHandle, AVCCommonObj *video)
{
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "DPBInitBuffer");
AVCDecPicBuffer *dpb = video->decPicBuf;
int ii, status; /* 'status' is only used in the PV_MEMORY_POOL build */
/* Before doing any decoding, check if there's a frame memory available */
/* look for next unused dpb->fs, or complementary field pair */
/* video->currPic is assigned to this */
/* There's also restriction on the frame_num, see page 59 of JVT-I1010.doc. */
for (ii = 0; ii < dpb->num_fs; ii++)
{
/* looking for the one not used or not reference and has been outputted */
if (dpb->fs[ii]->IsReference == 0 && dpb->fs[ii]->IsOutputted == 3)
{
video->currFS = dpb->fs[ii];
#ifdef PV_MEMORY_POOL
/* attach pool memory for this slot; fills currFS->base_dpb */
status = avcHandle->CBAVC_FrameBind(avcHandle->userData, ii, &(video->currFS->base_dpb));
if (status == AVC_FAIL)
{
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "DPBInitBuffer CBAVC_FrameBind-> return AVC_NO_BUFFER for fs: %d", ii);
return AVC_NO_BUFFER; /* this should not happen */
}
#endif
break;
}
}
/* loop ran to completion => no free slot */
if (ii == dpb->num_fs)
{
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "DPBInitBuffer return AVC_PICTURE_OUTPUT_READY");
return AVC_PICTURE_OUTPUT_READY; /* no empty frame available */
}
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, "DPBInitBuffer final return AVC_SUCCESS");
return AVC_SUCCESS;
}
/* Initialize video->currFS / video->currPic for the picture about to be
   decoded: record POC and frame_num, clear all reference/output flags, and
   carve the Y/Cb/Cr plane pointers out of the slot's base_dpb buffer.
   CurrPicNum is stored both as FrameNumWrap (MC_FIX) and as PicNum. */
OSCL_EXPORT_REF void DPBInitPic(AVCCommonObj *video, int CurrPicNum)
{
int offset = 0;
int offsetc = 0;
int luma_framesize;
/* this part has to be set here, assuming that slice header and POC have been decoded. */
/* used in GetOutput API */
video->currFS->PicOrderCnt = video->PicOrderCnt;
video->currFS->FrameNum = video->sliceHdr->frame_num;
video->currFS->FrameNumWrap = CurrPicNum; // MC_FIX
/* initialize everything to zero */
video->currFS->IsOutputted = 0;
video->currFS->IsReference = 0;
video->currFS->IsLongTerm = 0;
video->currFS->frame.isReference = FALSE;
video->currFS->frame.isLongTerm = FALSE;
/* initialize the pixel pointer to NULL */
video->currFS->frame.Sl = video->currFS->frame.Scb = video->currFS->frame.Scr = NULL;
/* determine video->currPic */
/* assign dbp->base_dpb to fs[i]->frame.Sl, Scb, Scr .*/
/* For PicSizeInMbs, see DecodeSliceHeader() */
video->currPic = &(video->currFS->frame);
video->currPic->padded = 0; // reset this flag to not-padded
if (video->padded_size)
{
/* padded layout: luma offset lands 16 rows down and 16 samples in, on a
   pitch of PicWidthInSamplesL + 32; chroma offset is the quarter-scaled
   equivalent — assumes a 16-sample border on each side (TODO confirm
   against the padded allocation in AVCConfigureSequence) */
offset = ((video->PicWidthInSamplesL + 32) << 4) + 16; // offset to the origin
offsetc = (offset >> 2) + 4;
luma_framesize = (int)((((video->FrameHeightInMbs + 2) * (video->PicWidthInMbs + 2)) << 8));
}
else
/* unpadded: 256 luma samples per macroblock */
luma_framesize = video->PicSizeInMbs << 8;
/* Y plane, then Cb, then Cr packed right after (Cr is a quarter of luma) */
video->currPic->Sl = video->currFS->base_dpb + offset;
video->currPic->Scb = video->currFS->base_dpb + luma_framesize + offsetc;
video->currPic->Scr = video->currPic->Scb + (luma_framesize >> 2);
video->currPic->pitch = video->PicWidthInSamplesL + (video->padded_size == 0 ? 0 : 32);
video->currPic->height = video->PicHeightInSamplesL;
video->currPic->width = video->PicWidthInSamplesL;
video->currPic->PicNum = CurrPicNum;
}
/* to release skipped frame after encoding */
/* Give the current frame's buffer straight back to the pool without it ever
   being output or referenced: mark it fully outputted, then (memory-pool
   builds only) locate its slot index and unbind the pool buffer. */
OSCL_EXPORT_REF void DPBReleaseCurrentFrame(AVCHandle *avcHandle, AVCCommonObj *video)
{
AVCDecPicBuffer *dpb = video->decPicBuf;
int ii;
video->currFS->IsOutputted = 3; // return this buffer.
#ifdef PV_MEMORY_POOL /* for non-memory pool, no need to do anything */
/* search for current frame index */
ii = dpb->num_fs;
while (ii--)
{
if (dpb->fs[ii] == video->currFS)
{
avcHandle->CBAVC_FrameUnbind(avcHandle->userData, ii);
break;
}
}
#endif
return ;
}
/* see subclause 8.2.5.1 */
/* Decoded reference picture marking. After a picture is decoded, update the
   reference status of every frame store: an IDR invalidates all previous
   references (and may flush output); a non-IDR reference picture triggers
   either sliding-window or adaptive (MMCO) marking. Finally verifies the
   reference count does not exceed num_ref_frames from the active SPS;
   returns AVC_FAIL (or the marking status) on violation. */
OSCL_EXPORT_REF AVCStatus StorePictureInDPB(AVCHandle *avcHandle, AVCCommonObj *video)
{
AVCStatus status;
AVCDecPicBuffer *dpb = video->decPicBuf;
AVCSliceHeader *sliceHdr = video->sliceHdr;
int ii, num_ref;
/* number 1 of 8.2.5.1, we handle gaps in frame_num differently without using the memory */
/* to be done!!!! */
/* number 3 of 8.2.5.1 */
if (video->nal_unit_type == AVC_NALTYPE_IDR)
{
/* IDR: every other stored frame stops being a reference immediately */
for (ii = 0; ii < dpb->num_fs; ii++)
{
if (dpb->fs[ii] != video->currFS) /* not current frame */
{
dpb->fs[ii]->IsReference = 0; /* mark as unused for reference */
dpb->fs[ii]->IsLongTerm = 0; /* but still used until output */
dpb->fs[ii]->IsOutputted |= 0x02;
#ifdef PV_MEMORY_POOL
/* displayed AND unreferenced => buffer can go back to the pool */
if (dpb->fs[ii]->IsOutputted == 3)
{
avcHandle->CBAVC_FrameUnbind(avcHandle->userData, ii);
}
#endif
}
}
video->currPic->isReference = TRUE;
video->currFS->IsReference = 3;
/* long_term_reference_flag decides whether the IDR itself becomes a
   short-term or a long-term reference */
if (sliceHdr->long_term_reference_flag == 0)
{
video->currPic->isLongTerm = FALSE;
video->currFS->IsLongTerm = 0;
video->MaxLongTermFrameIdx = -1;
}
else
{
video->currPic->isLongTerm = TRUE;
video->currFS->IsLongTerm = 3;
video->currFS->LongTermFrameIdx = 0;
video->MaxLongTermFrameIdx = 0;
}
if (sliceHdr->no_output_of_prior_pics_flag)
{
/* drop, without displaying, every picture still waiting for output */
for (ii = 0; ii < dpb->num_fs; ii++)
{
if (dpb->fs[ii] != video->currFS) /* not current frame */
{
dpb->fs[ii]->IsOutputted = 3;
#ifdef PV_MEMORY_POOL
avcHandle->CBAVC_FrameUnbind(avcHandle->userData, ii);
#endif
}
}
}
video->mem_mgr_ctrl_eq_5 = TRUE; /* flush reference frames MC_FIX */
}
else
{
/* non-IDR: only reference pictures need marking */
if (video->currPic->isReference == TRUE)
{
if (sliceHdr->adaptive_ref_pic_marking_mode_flag == 0)
{
status = sliding_window_process(avcHandle, video, dpb); /* we may have to do this after adaptive_memory_marking */
}
else
{
status = adaptive_memory_marking(avcHandle, video, dpb, sliceHdr);
}
if (status != AVC_SUCCESS)
{
return status;
}
}
}
/* number 4 of 8.2.5.1 */
/* This basically says every frame must be at least used for short-term ref. */
/* Need to be revisited!!! */
/* look at insert_picture_in_dpb() */
if (video->nal_unit_type != AVC_NALTYPE_IDR && video->currPic->isLongTerm == FALSE)
{
if (video->currPic->isReference)
{
video->currFS->IsReference = 3;
}
else
{
video->currFS->IsReference = 0;
}
video->currFS->IsLongTerm = 0;
}
/* check if number of reference frames doesn't exceed num_ref_frames */
num_ref = 0;
for (ii = 0; ii < dpb->num_fs; ii++)
{
if (dpb->fs[ii]->IsReference)
{
num_ref++;
}
}
if (num_ref > (int)video->currSeqParams->num_ref_frames)
{
return AVC_FAIL; /* out of range */
}
return AVC_SUCCESS;
}
/* Sliding-window reference marking (subclause 8.2.5.3): while the number of
   reference frames (excluding the current one) is at or above the SPS limit,
   evict the short-term reference with the smallest FrameNumWrap. Returns
   AVC_FAIL only if eviction is required but no short-term candidate exists
   (all remaining references are long-term). */
AVCStatus sliding_window_process(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb)
{
int ii, numShortTerm, numLongTerm;
int32 MinFrameNumWrap;
int MinIdx;
numShortTerm = 0;
numLongTerm = 0;
/* count existing short- and long-term references, current frame excluded */
for (ii = 0; ii < dpb->num_fs; ii++)
{
if (dpb->fs[ii] != video->currFS) /* do not count the current frame */
{
if (dpb->fs[ii]->IsLongTerm)
{
numLongTerm++;
}
else if (dpb->fs[ii]->IsReference)
{
numShortTerm++;
}
}
}
/* Remove this check to allow certain corrupted content to pass. Can re-enable it if
it turns out to cause undesirable effect.
if (numShortTerm <= 0)
{
return AVC_FAIL;
} */
/* only numShortTerm is decremented inside; long-term refs are never evicted here */
while (numShortTerm + numLongTerm >= (int)video->currSeqParams->num_ref_frames)
{
/* get short-term ref frame with smallest PicOrderCnt */
/* this doesn't work for all I-slice clip since PicOrderCnt will not be initialized */
MinFrameNumWrap = 0x7FFFFFFF;
MinIdx = -1;
for (ii = 0; ii < dpb->num_fs; ii++)
{
if (dpb->fs[ii]->IsReference && !dpb->fs[ii]->IsLongTerm)
{
if (dpb->fs[ii]->FrameNumWrap < MinFrameNumWrap)
{
MinFrameNumWrap = dpb->fs[ii]->FrameNumWrap;
MinIdx = ii;
}
}
}
if (MinIdx < 0) /* something wrong, impossible */
{
return AVC_FAIL;
}
/* mark the frame with smallest PicOrderCnt to be unused for reference */
dpb->fs[MinIdx]->IsReference = 0;
dpb->fs[MinIdx]->IsLongTerm = 0;
dpb->fs[MinIdx]->frame.isReference = FALSE;
dpb->fs[MinIdx]->frame.isLongTerm = FALSE;
dpb->fs[MinIdx]->IsOutputted |= 0x02;
#ifdef PV_MEMORY_POOL
/* displayed AND unreferenced => release the pool buffer */
if (dpb->fs[MinIdx]->IsOutputted == 3)
{
avcHandle->CBAVC_FrameUnbind(avcHandle->userData, MinIdx);
}
#endif
numShortTerm--;
}
return AVC_SUCCESS;
}
/* see subclause 8.2.5.4 */
/* Apply the slice header's list of memory_management_control_operation
   (MMCO) commands, in order, until the terminating value 0. Operations 1-6
   dispatch to the MemMgrCtrlOpN helpers below; op 5 additionally resets the
   current frame's FrameNum and PicOrderCnt. Returns AVC_FAIL if the list is
   not 0-terminated within MAX_DEC_REF_PIC_MARKING entries. */
AVCStatus adaptive_memory_marking(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, AVCSliceHeader *sliceHdr)
{
int ii;
ii = 0;
while (ii < MAX_DEC_REF_PIC_MARKING && sliceHdr->memory_management_control_operation[ii] != 0)
{
switch (sliceHdr->memory_management_control_operation[ii])
{
case 1: /* unmark a short-term picture */
MemMgrCtrlOp1(avcHandle, video, dpb, sliceHdr->difference_of_pic_nums_minus1[ii]);
// update_ref_list(dpb);
break;
case 2: /* unmark a long-term picture */
MemMgrCtrlOp2(avcHandle, dpb, sliceHdr->long_term_pic_num[ii]);
break;
case 3: /* promote a short-term picture to long-term */
MemMgrCtrlOp3(avcHandle, video, dpb, sliceHdr->difference_of_pic_nums_minus1[ii], sliceHdr->long_term_frame_idx[ii]);
break;
case 4: /* set MaxLongTermFrameIdx */
MemMgrCtrlOp4(avcHandle, video, dpb, sliceHdr->max_long_term_frame_idx_plus1[ii]);
break;
case 5: /* unmark everything; current frame restarts numbering */
MemMgrCtrlOp5(avcHandle, video, dpb);
video->currFS->FrameNum = 0; //
video->currFS->PicOrderCnt = 0;
break;
case 6: /* make the current picture long-term */
MemMgrCtrlOp6(avcHandle, video, dpb, sliceHdr->long_term_frame_idx[ii]);
break;
}
ii++;
}
if (ii == MAX_DEC_REF_PIC_MARKING)
{
return AVC_FAIL; /* exceed the limit */
}
return AVC_SUCCESS;
}
/* see subclause 8.2.5.4.1, mark short-term picture as "unused for reference" */
void MemMgrCtrlOp1(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, int difference_of_pic_nums_minus1)
{
    /* picture number of the short-term reference to drop */
    int targetPicNum = video->CurrPicNum - (difference_of_pic_nums_minus1 + 1);
    int idx;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        AVCFrameStore *fs = dpb->fs[idx];
        /* candidates are frames used as reference (both fields) and not long-term */
        if (fs->IsReference != 3 || fs->IsLongTerm != 0)
        {
            continue;
        }
        if (fs->frame.PicNum == targetPicNum)
        {
            unmark_for_reference(avcHandle, dpb, idx);
            return ;
        }
    }
    return ;
}
/* see subclause 8.2.5.4.2 mark long-term picture as "unused for reference" */
void MemMgrCtrlOp2(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, int long_term_pic_num)
{
    int idx;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        AVCFrameStore *fs = dpb->fs[idx];
        /* drop the long-term frame whose LongTermPicNum matches */
        if (fs->IsLongTerm == 3 && fs->frame.LongTermPicNum == long_term_pic_num)
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
    }
}
/* see subclause 8.2.5.4.3 assign LongTermFrameIdx to a short-term ref picture */
void MemMgrCtrlOp3(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint difference_of_pic_nums_minus1,
                   uint long_term_frame_idx)
{
    int targetPicNum = video->CurrPicNum - (difference_of_pic_nums_minus1 + 1);
    int idx;

    /* first release any long-term frame already holding this index */
    unmark_long_term_frame_for_reference_by_frame_idx(avcHandle, dpb, long_term_frame_idx);

    /* then promote the short-term frame whose PicNum matches */
    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        AVCFrameStore *fs = dpb->fs[idx];
        if (fs->IsReference != 3)
        {
            continue;
        }
        if (fs->frame.isLongTerm == FALSE && fs->frame.PicNum == targetPicNum)
        {
            fs->LongTermFrameIdx = long_term_frame_idx;
            fs->frame.LongTermPicNum = long_term_frame_idx;
            fs->frame.isLongTerm = TRUE;
            fs->IsLongTerm = 3;
            return;
        }
    }
}
/* see subclause 8.2.5.4.4, MaxLongTermFrameIdx */
void MemMgrCtrlOp4(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint max_long_term_frame_idx_plus1)
{
    int idx;

    /* a plus1 value of 0 yields -1, i.e. "no long-term frame indices" */
    video->MaxLongTermFrameIdx = max_long_term_frame_idx_plus1 - 1;

    /* drop any long-term frame (other than the current one) whose index
       now exceeds the new maximum */
    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        AVCFrameStore *fs = dpb->fs[idx];
        if (fs->IsLongTerm && fs != video->currFS &&
                fs->LongTermFrameIdx > video->MaxLongTermFrameIdx)
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
    }
}
/* see subclause 8.2.5.4.5 mark all reference picture as "unused for reference" and setting
MaxLongTermFrameIdx to "no long-term frame indices" */
void MemMgrCtrlOp5(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb)
{
    int idx = 0;

    video->MaxLongTermFrameIdx = -1;
    /* every stored frame except the one being decoded loses its reference
       marking (current frame kept per MC_FIX) */
    while (idx < dpb->num_fs)
    {
        if (dpb->fs[idx] != video->currFS)
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
        idx++;
    }
    video->mem_mgr_ctrl_eq_5 = TRUE;
}
/* see subclause 8.2.5.4.6 assign a long-term frame index to the current picture */
void MemMgrCtrlOp6(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicBuffer *dpb, uint long_term_frame_idx)
{
    /* free the index first, in case another long-term frame holds it */
    unmark_long_term_frame_for_reference_by_frame_idx(avcHandle, dpb, long_term_frame_idx);
    /* then make the current picture a long-term reference under that index */
    video->currFS->LongTermFrameIdx = long_term_frame_idx;
    video->currFS->IsReference = 3;
    video->currFS->IsLongTerm = 3;
    video->currPic->isReference = TRUE;
    video->currPic->isLongTerm = TRUE;
}
/* Clear all reference marking on frame store dpb->fs[idx] (both the store
   flags and the embedded frame flags) and set the "no longer needed for
   reference" bit (0x02) of IsOutputted. When IsOutputted reaches 3 — already
   displayed AND unreferenced — the pool buffer is unbound in memory-pool
   builds. */
void unmark_for_reference(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint idx)
{
AVCFrameStore *fs = dpb->fs[idx];
fs->frame.isReference = FALSE;
fs->frame.isLongTerm = FALSE;
fs->IsLongTerm = 0;
fs->IsReference = 0;
fs->IsOutputted |= 0x02;
#ifdef PV_MEMORY_POOL
if (fs->IsOutputted == 3)
{
avcHandle->CBAVC_FrameUnbind(avcHandle->userData, idx);
}
#endif
return ;
}
/* Remove reference marking from every long-term frame carrying the given
   LongTermFrameIdx. */
void unmark_long_term_frame_for_reference_by_frame_idx(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, uint long_term_frame_idx)
{
    int idx;

    for (idx = 0; idx < dpb->num_fs; idx++)
    {
        AVCFrameStore *fs = dpb->fs[idx];
        if (!fs->IsLongTerm)
        {
            continue;
        }
        if (fs->LongTermFrameIdx == (int)long_term_frame_idx)
        {
            unmark_for_reference(avcHandle, dpb, idx);
        }
    }
}
================================================
FILE: RtspCamera/jni/avc_h264/common/src/fmo.cpp
================================================
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avclib_common.h"
#include "oscl_mem.h"
/* see subclause 8.2.2 Decoding process for macroblock to slice group map */
/* Build video->MbToSliceGroupMap from the active picture parameter set.
   With a single slice group the map is all zeros; otherwise dispatch on
   slice_group_map_type (0..6) to the matching generator below. Returns
   AVC_FAIL only for an out-of-range map type. */
OSCL_EXPORT_REF AVCStatus FMOInit(AVCCommonObj *video)
{
AVCPicParamSet *currPPS = video->currPicParams;
int *MbToSliceGroupMap = video->MbToSliceGroupMap;
int PicSizeInMapUnits = video->PicSizeInMapUnits;
int PicWidthInMbs = video->PicWidthInMbs;
if (currPPS->num_slice_groups_minus1 == 0)
{
/* single slice group: every map unit belongs to group 0 */
oscl_memset(video->MbToSliceGroupMap, 0, video->PicSizeInMapUnits*sizeof(uint));
}
else
{
switch (currPPS->slice_group_map_type)
{
case 0: /* interleaved */
FmoGenerateType0MapUnitMap(MbToSliceGroupMap, currPPS->run_length_minus1, currPPS->num_slice_groups_minus1, PicSizeInMapUnits);
break;
case 1: /* dispersed */
FmoGenerateType1MapUnitMap(MbToSliceGroupMap, PicWidthInMbs, currPPS->num_slice_groups_minus1, PicSizeInMapUnits);
break;
case 2: /* foreground rectangles + left-over */
FmoGenerateType2MapUnitMap(currPPS, MbToSliceGroupMap, PicWidthInMbs, currPPS->num_slice_groups_minus1, PicSizeInMapUnits);
break;
case 3: /* box-out */
FmoGenerateType3MapUnitMap(video, currPPS, MbToSliceGroupMap, PicWidthInMbs);
break;
case 4: /* raster scan */
FmoGenerateType4MapUnitMap(MbToSliceGroupMap, video->MapUnitsInSliceGroup0, currPPS->slice_group_change_direction_flag, PicSizeInMapUnits);
break;
case 5: /* wipe */
FmoGenerateType5MapUnitMap(MbToSliceGroupMap, video, currPPS->slice_group_change_direction_flag, PicSizeInMapUnits);
break;
case 6: /* explicit per-unit assignment */
FmoGenerateType6MapUnitMap(MbToSliceGroupMap, (int*)currPPS->slice_group_id, PicSizeInMapUnits);
break;
default:
return AVC_FAIL; /* out of range, shouldn't come this far */
}
}
return AVC_SUCCESS;
}
/* see subclause 8.2.2.1 interleaved slice group map type*/
/* Cycle through the slice groups, giving group g a run of
   run_length_minus1[g] + 1 consecutive map units, and repeat the cycle
   until the whole picture is covered. */
void FmoGenerateType0MapUnitMap(int *mapUnitToSliceGroupMap, uint *run_length_minus1, uint num_slice_groups_minus1, uint PicSizeInMapUnits)
{
    uint unit = 0;
    do
    {
        uint group = 0;
        while (group <= num_slice_groups_minus1 && unit < PicSizeInMapUnits)
        {
            uint runLen = run_length_minus1[group] + 1;
            uint k;
            /* clip the run at the end of the picture */
            for (k = 0; k < runLen && unit + k < PicSizeInMapUnits; k++)
            {
                mapUnitToSliceGroupMap[unit + k] = group;
            }
            unit += runLen;
            group++;
        }
    }
    while (unit < PicSizeInMapUnits);
}
/* see subclause 8.2.2.2 dispersed slice group map type*/
/* Each row is shifted by half a group-cycle relative to the previous one,
   scattering the groups across the picture. */
void FmoGenerateType1MapUnitMap(int *mapUnitToSliceGroupMap, int PicWidthInMbs, uint num_slice_groups_minus1, uint PicSizeInMapUnits)
{
    uint numGroups = num_slice_groups_minus1 + 1;
    uint unit;
    for (unit = 0; unit < PicSizeInMapUnits; unit++)
    {
        uint col = unit % PicWidthInMbs;
        uint rowShift = ((unit / PicWidthInMbs) * numGroups) / 2;
        mapUnitToSliceGroupMap[unit] = (col + rowShift) % numGroups;
    }
}
/* see subclause 8.2.2.3 foreground with left-over slice group map type */
/* Background (left-over) group fills the picture; foreground rectangles are
   painted from the highest group index down to 0, so lower-numbered groups
   win where rectangles overlap. */
void FmoGenerateType2MapUnitMap(AVCPicParamSet *pps, int *mapUnitToSliceGroupMap, int PicWidthInMbs,
                                uint num_slice_groups_minus1, uint PicSizeInMapUnits)
{
    uint unit;
    int group;

    for (unit = 0; unit < PicSizeInMapUnits; unit++)
    {
        mapUnitToSliceGroupMap[unit] = num_slice_groups_minus1;
    }
    for (group = num_slice_groups_minus1 - 1; group >= 0; group--)
    {
        /* rectangle corners are encoded as raster-scan map-unit addresses */
        uint yTop = pps->top_left[group] / PicWidthInMbs;
        uint xLeft = pps->top_left[group] % PicWidthInMbs;
        uint yBottom = pps->bottom_right[group] / PicWidthInMbs;
        uint xRight = pps->bottom_right[group] % PicWidthInMbs;
        uint x, y;
        for (y = yTop; y <= yBottom; y++)
        {
            for (x = xLeft; x <= xRight; x++)
            {
                mapUnitToSliceGroupMap[y * PicWidthInMbs + x] = group;
            }
        }
    }
}
/* see subclause 8.2.2.4 box-out slice group map type */
/* follow the text rather than the JM, it's quite different. */
/* Group 0 grows as a box spiralling out from the picture center until it
   holds MapUnitsInSliceGroup0 units; everything else stays in group 1.
   slice_group_change_direction_flag selects clockwise vs counter-clockwise
   growth. */
void FmoGenerateType3MapUnitMap(AVCCommonObj *video, AVCPicParamSet* pps, int *mapUnitToSliceGroupMap,
int PicWidthInMbs)
{
uint i, k;
int leftBound, topBound, rightBound, bottomBound;
int x, y, xDir, yDir;
int mapUnitVacant;
uint PicSizeInMapUnits = video->PicSizeInMapUnits;
uint MapUnitsInSliceGroup0 = video->MapUnitsInSliceGroup0;
/* start with every unit in group 1 */
for (i = 0; i < PicSizeInMapUnits; i++)
{
mapUnitToSliceGroupMap[ i ] = 1;
}
/* starting point: picture center, biased by the direction flag */
x = (PicWidthInMbs - pps->slice_group_change_direction_flag) / 2;
y = (video->PicHeightInMapUnits - pps->slice_group_change_direction_flag) / 2;
leftBound = x;
topBound = y;
rightBound = x;
bottomBound = y;
xDir = pps->slice_group_change_direction_flag - 1;
yDir = pps->slice_group_change_direction_flag;
/* k only advances when a vacant unit was claimed (mapUnitVacant is 0 or 1) */
for (k = 0; k < MapUnitsInSliceGroup0; k += mapUnitVacant)
{
mapUnitVacant = (mapUnitToSliceGroupMap[ y * PicWidthInMbs + x ] == 1);
if (mapUnitVacant)
{
mapUnitToSliceGroupMap[ y * PicWidthInMbs + x ] = 0;
}
/* at a box corner: widen the corresponding bound (clamped to the
   picture) and turn; otherwise keep stepping in the current direction */
if (xDir == -1 && x == leftBound)
{
leftBound = AVC_MAX(leftBound - 1, 0);
x = leftBound;
xDir = 0;
yDir = 2 * pps->slice_group_change_direction_flag - 1;
}
else if (xDir == 1 && x == rightBound)
{
rightBound = AVC_MIN(rightBound + 1, (int)PicWidthInMbs - 1);
x = rightBound;
xDir = 0;
yDir = 1 - 2 * pps->slice_group_change_direction_flag;
}
else if (yDir == -1 && y == topBound)
{
topBound = AVC_MAX(topBound - 1, 0);
y = topBound;
xDir = 1 - 2 * pps->slice_group_change_direction_flag;
yDir = 0;
}
else if (yDir == 1 && y == bottomBound)
{
bottomBound = AVC_MIN(bottomBound + 1, (int)video->PicHeightInMapUnits - 1);
y = bottomBound;
xDir = 2 * pps->slice_group_change_direction_flag - 1;
yDir = 0;
}
else
{
x = x + xDir;
y = y + yDir;
}
}
}
/* see subclause 8.2.2.5 raster scan slice group map types */
/* The picture splits into an "upper-left" run and the remainder in raster
   order; the direction flag decides which of the two groups goes first. */
void FmoGenerateType4MapUnitMap(int *mapUnitToSliceGroupMap, int MapUnitsInSliceGroup0, int slice_group_change_direction_flag, uint PicSizeInMapUnits)
{
    uint upperLeftSize;
    uint unit;

    if (slice_group_change_direction_flag)
    {
        upperLeftSize = PicSizeInMapUnits - MapUnitsInSliceGroup0;
    }
    else
    {
        upperLeftSize = MapUnitsInSliceGroup0;
    }
    for (unit = 0; unit < PicSizeInMapUnits; unit++)
    {
        mapUnitToSliceGroupMap[unit] = (unit < upperLeftSize)
            ? (1 - slice_group_change_direction_flag)
            : slice_group_change_direction_flag;
    }
}
/* see subclause 8.2.2.6, wipe slice group map type. */
/* The wipe advances column by column (column-major order); the first
   sizeOfUpperLeftGroup units visited belong to one group, the rest to the
   other, with the direction flag picking which is which. */
void FmoGenerateType5MapUnitMap(int *mapUnitToSliceGroupMap, AVCCommonObj *video,
                                int slice_group_change_direction_flag, uint PicSizeInMapUnits)
{
    const int width = video->PicWidthInMbs;
    const int height = video->PicHeightInMapUnits;
    const int unitsInGroup0 = video->MapUnitsInSliceGroup0;
    const int upperLeftSize = slice_group_change_direction_flag
        ? ((int)PicSizeInMapUnits - unitsInGroup0)
        : unitsInGroup0;
    int col, row;
    int counted = 0;

    for (col = 0; col < width; col++)
    {
        for (row = 0; row < height; row++)
        {
            int group = (counted < upperLeftSize)
                ? (1 - slice_group_change_direction_flag)
                : slice_group_change_direction_flag;
            mapUnitToSliceGroupMap[row * width + col] = group;
            counted++;
        }
    }
}
/* see subclause 8.2.2.7, explicit slice group map */
/* The PPS carries the slice group of every map unit explicitly; copy it. */
void FmoGenerateType6MapUnitMap(int *mapUnitToSliceGroupMap, int *slice_group_id, uint PicSizeInMapUnits)
{
    int *dst = mapUnitToSliceGroupMap;
    const int *src = slice_group_id;
    uint remaining = PicSizeInMapUnits;

    while (remaining--)
    {
        *dst++ = *src++;
    }
}
================================================
FILE: RtspCamera/jni/avc_h264/common/src/mb_access.cpp
================================================
/* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
#include "avclib_common.h"
#include "oscl_mem.h"
/* Compute the raster-scan addresses of the current macroblock's four
   neighbors (A=left, B=top, C=top-right, D=top-left) and their availability
   flags. Frame coding only; a neighbor is available only when it exists
   inside the picture and belongs to the same slice as the current MB. */
OSCL_EXPORT_REF void InitNeighborAvailability(AVCCommonObj *video, int mbNum)
{
    const int widthInMbs = video->PicWidthInMbs;
    const int currSliceId = video->currMB->slice_id;

    video->mbAddrA = mbNum - 1;              /* left */
    video->mbAddrB = mbNum - widthInMbs;     /* top */
    video->mbAddrC = mbNum - widthInMbs + 1; /* top-right */
    video->mbAddrD = mbNum - widthInMbs - 1; /* top-left */

    video->mbAvailA = video->mbAvailB = video->mbAvailC = video->mbAvailD = 0;

    if (video->mb_x)
    {
        /* not in the first column: A exists; D also needs a row above */
        video->mbAvailA = (video->mblock[video->mbAddrA].slice_id == currSliceId);
        if (video->mb_y)
        {
            video->mbAvailD = (video->mblock[video->mbAddrD].slice_id == currSliceId);
        }
    }
    if (video->mb_y)
    {
        /* not in the first row: B exists; C also needs a column to the right */
        video->mbAvailB = (video->mblock[video->mbAddrB].slice_id == currSliceId);
        if (video->mb_x < (widthInMbs - 1))
        {
            video->mbAvailC = (video->mblock[video->mbAddrC].slice_id == currSliceId);
        }
    }
    return ;
}
/* A macroblock is "available" when its address lies inside the picture and
   it belongs to the same slice as the current macroblock. */
bool mb_is_available(AVCMacroblock *mblock, uint PicSizeInMbs, int mbAddr, int currMbAddr)
{
    /* outside the picture? */
    if (mbAddr < 0 || mbAddr >= (int)PicSizeInMbs)
    {
        return FALSE;
    }
    /* must be in the same slice as the current macroblock */
    return (mblock[mbAddr].slice_id == mblock[currMbAddr].slice_id) ? TRUE : FALSE;
}
gitextract_nchk1d3j/
├── README.md
├── RtspCamera/
│ ├── .classpath
│ ├── .gitignore
│ ├── .project
│ ├── .settings/
│ │ └── org.jboss.ide.eclipse.as.core.prefs
│ ├── AndroidManifest.xml
│ ├── docs/
│ │ └── wire-udp4000-h264-with-inband-sps.pps-nativeencoder.pcap
│ ├── gpl.txt
│ ├── jni/
│ │ ├── Android.mk
│ │ ├── Application.mk
│ │ ├── avc_h264/
│ │ │ ├── Android.mk
│ │ │ ├── common/
│ │ │ │ ├── include/
│ │ │ │ │ ├── avcapi_common.h
│ │ │ │ │ ├── avcint_common.h
│ │ │ │ │ └── avclib_common.h
│ │ │ │ └── src/
│ │ │ │ ├── deblock.cpp
│ │ │ │ ├── dpb.cpp
│ │ │ │ ├── fmo.cpp
│ │ │ │ ├── mb_access.cpp
│ │ │ │ └── reflist.cpp
│ │ │ ├── dec/
│ │ │ │ ├── Android.mk
│ │ │ │ ├── include/
│ │ │ │ │ ├── avcdec_api.h
│ │ │ │ │ ├── pvavcdecoder.h
│ │ │ │ │ ├── pvavcdecoder_factory.h
│ │ │ │ │ └── pvavcdecoderinterface.h
│ │ │ │ └── src/
│ │ │ │ ├── 3GPVideoParser.cpp
│ │ │ │ ├── 3GPVideoParser.h
│ │ │ │ ├── NativeH264Decoder.cpp
│ │ │ │ ├── NativeH264Decoder.h
│ │ │ │ ├── avc_bitstream.cpp
│ │ │ │ ├── avcdec_api.cpp
│ │ │ │ ├── avcdec_bitstream.h
│ │ │ │ ├── avcdec_int.h
│ │ │ │ ├── avcdec_lib.h
│ │ │ │ ├── header.cpp
│ │ │ │ ├── itrans.cpp
│ │ │ │ ├── pred_inter.cpp
│ │ │ │ ├── pred_intra.cpp
│ │ │ │ ├── pvavcdecoder.cpp
│ │ │ │ ├── pvavcdecoder_factory.cpp
│ │ │ │ ├── residual.cpp
│ │ │ │ ├── slice.cpp
│ │ │ │ ├── vlc.cpp
│ │ │ │ ├── yuv2rgb.cpp
│ │ │ │ └── yuv2rgb.h
│ │ │ ├── enc/
│ │ │ │ ├── Android.mk
│ │ │ │ ├── include/
│ │ │ │ │ ├── pvavcencoder.h
│ │ │ │ │ ├── pvavcencoder_factory.h
│ │ │ │ │ └── pvavcencoderinterface.h
│ │ │ │ └── src/
│ │ │ │ ├── NativeH264Encoder.cpp
│ │ │ │ ├── NativeH264Encoder.cpp__orig
│ │ │ │ ├── NativeH264Encoder.h
│ │ │ │ ├── avcenc_api.cpp
│ │ │ │ ├── avcenc_api.h
│ │ │ │ ├── avcenc_int.h
│ │ │ │ ├── avcenc_lib.h
│ │ │ │ ├── bitstream_io.cpp
│ │ │ │ ├── block.cpp
│ │ │ │ ├── findhalfpel.cpp
│ │ │ │ ├── header.cpp
│ │ │ │ ├── init.cpp
│ │ │ │ ├── intra_est.cpp
│ │ │ │ ├── motion_comp.cpp
│ │ │ │ ├── motion_est.cpp
│ │ │ │ ├── pvavcencoder.cpp
│ │ │ │ ├── pvavcencoder_factory.cpp
│ │ │ │ ├── rate_control.cpp
│ │ │ │ ├── residual.cpp
│ │ │ │ ├── sad.cpp
│ │ │ │ ├── sad_halfpel.cpp
│ │ │ │ ├── sad_halfpel_inline.h
│ │ │ │ ├── sad_inline.h
│ │ │ │ ├── sad_mb_offset.h
│ │ │ │ ├── slice.cpp
│ │ │ │ └── vlc_encode.cpp
│ │ │ └── oscl/
│ │ │ ├── oscl_base.h
│ │ │ ├── oscl_base_macros.h
│ │ │ ├── oscl_config.h
│ │ │ ├── oscl_dll.h
│ │ │ ├── oscl_error.h
│ │ │ ├── oscl_error_codes.h
│ │ │ ├── oscl_exception.h
│ │ │ ├── oscl_math.h
│ │ │ ├── oscl_mem.h
│ │ │ ├── oscl_string.h
│ │ │ ├── oscl_types.h
│ │ │ └── osclconfig_compiler_warnings.h
│ │ └── m4v_h263/
│ │ ├── Android.mk
│ │ ├── dec/
│ │ │ ├── Android.mk
│ │ │ ├── include/
│ │ │ │ ├── mp4dec_api.h
│ │ │ │ ├── pvm4vdecoder.h
│ │ │ │ ├── pvm4vdecoder_dpi.h
│ │ │ │ ├── pvm4vdecoder_factory.h
│ │ │ │ ├── pvvideodecoderinterface.h
│ │ │ │ └── visual_header.h
│ │ │ ├── oscl/
│ │ │ │ ├── oscl_base.h
│ │ │ │ ├── oscl_base_macros.h
│ │ │ │ ├── oscl_config.h
│ │ │ │ ├── oscl_dll.h
│ │ │ │ ├── oscl_error.h
│ │ │ │ ├── oscl_error_codes.h
│ │ │ │ ├── oscl_exception.h
│ │ │ │ ├── oscl_math.h
│ │ │ │ ├── oscl_mem.h
│ │ │ │ ├── oscl_types.h
│ │ │ │ └── osclconfig_compiler_warnings.h
│ │ │ └── src/
│ │ │ ├── 3GPVideoParser.cpp
│ │ │ ├── 3GPVideoParser.h
│ │ │ ├── adaptive_smooth_no_mmx.cpp
│ │ │ ├── bitstream.cpp
│ │ │ ├── bitstream.h
│ │ │ ├── block_idct.cpp
│ │ │ ├── cal_dc_scaler.cpp
│ │ │ ├── chv_filter.cpp
│ │ │ ├── chvr_filter.cpp
│ │ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_decoder_NativeH263Decoder.cpp
│ │ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_decoder_NativeH263Decoder.h
│ │ │ ├── combined_decode.cpp
│ │ │ ├── conceal.cpp
│ │ │ ├── datapart_decode.cpp
│ │ │ ├── dcac_prediction.cpp
│ │ │ ├── dec_pred_intra_dc.cpp
│ │ │ ├── deringing_chroma.cpp
│ │ │ ├── deringing_luma.cpp
│ │ │ ├── find_min_max.cpp
│ │ │ ├── get_pred_adv_b_add.cpp
│ │ │ ├── get_pred_outside.cpp
│ │ │ ├── idct.cpp
│ │ │ ├── idct.h
│ │ │ ├── idct_vca.cpp
│ │ │ ├── max_level.h
│ │ │ ├── mb_motion_comp.cpp
│ │ │ ├── mb_utils.cpp
│ │ │ ├── mbtype_mode.h
│ │ │ ├── motion_comp.h
│ │ │ ├── mp4dec_lib.h
│ │ │ ├── mp4def.h
│ │ │ ├── mp4lib_int.h
│ │ │ ├── packet_util.cpp
│ │ │ ├── post_filter.cpp
│ │ │ ├── post_proc.h
│ │ │ ├── post_proc_semaphore.cpp
│ │ │ ├── pp_semaphore_chroma_inter.cpp
│ │ │ ├── pp_semaphore_luma.cpp
│ │ │ ├── pvdec_api.cpp
│ │ │ ├── pvm4vdecoder.cpp
│ │ │ ├── pvm4vdecoder_factory.cpp
│ │ │ ├── scaling.h
│ │ │ ├── scaling_tab.cpp
│ │ │ ├── vlc_dec_tab.h
│ │ │ ├── vlc_decode.cpp
│ │ │ ├── vlc_decode.h
│ │ │ ├── vlc_dequant.cpp
│ │ │ ├── vlc_tab.cpp
│ │ │ ├── vop.cpp
│ │ │ ├── yuv2rgb.cpp
│ │ │ ├── yuv2rgb.h
│ │ │ ├── zigzag.h
│ │ │ └── zigzag_tab.cpp
│ │ └── enc/
│ │ ├── Android.mk
│ │ ├── include/
│ │ │ ├── cvei.h
│ │ │ ├── mp4enc_api.h
│ │ │ └── pvm4vencoder.h
│ │ ├── oscl/
│ │ │ ├── oscl_base.h
│ │ │ ├── oscl_base_macros.h
│ │ │ ├── oscl_config.h
│ │ │ ├── oscl_dll.h
│ │ │ ├── oscl_error.h
│ │ │ ├── oscl_error_codes.h
│ │ │ ├── oscl_exception.h
│ │ │ ├── oscl_math.h
│ │ │ ├── oscl_mem.h
│ │ │ ├── oscl_types.h
│ │ │ └── osclconfig_compiler_warnings.h
│ │ └── src/
│ │ ├── bitstream_io.cpp
│ │ ├── bitstream_io.h
│ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_encoder_NativeH263Encoder.cpp
│ │ ├── com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_encoder_NativeH263Encoder.h
│ │ ├── combined_encode.cpp
│ │ ├── datapart_encode.cpp
│ │ ├── dct.cpp
│ │ ├── dct.h
│ │ ├── dct_inline.h
│ │ ├── fastcodemb.cpp
│ │ ├── fastcodemb.h
│ │ ├── fastidct.cpp
│ │ ├── fastquant.cpp
│ │ ├── fastquant_inline.h
│ │ ├── findhalfpel.cpp
│ │ ├── m4venc_oscl.h
│ │ ├── me_utils.cpp
│ │ ├── motion_comp.cpp
│ │ ├── motion_est.cpp
│ │ ├── mp4def.h
│ │ ├── mp4enc_api.cpp
│ │ ├── mp4enc_api.cpp.original
│ │ ├── mp4enc_lib.h
│ │ ├── mp4lib_int.h
│ │ ├── pvm4vencoder.cpp
│ │ ├── rate_control.cpp
│ │ ├── rate_control.h
│ │ ├── sad.cpp
│ │ ├── sad_halfpel.cpp
│ │ ├── sad_halfpel_inline.h
│ │ ├── sad_inline.h
│ │ ├── sad_mb_offset.h
│ │ ├── vlc_enc_tab.h
│ │ ├── vlc_encode.cpp
│ │ ├── vlc_encode.h
│ │ ├── vlc_encode_inline.h
│ │ └── vop.cpp
│ ├── proguard.cfg
│ ├── project.properties
│ ├── res/
│ │ ├── layout/
│ │ │ ├── cameraapicodecs.xml
│ │ │ └── cameranativecodecs.xml
│ │ └── values/
│ │ └── strings.xml
│ └── src/
│ ├── com/
│ │ └── orangelabs/
│ │ └── rcs/
│ │ ├── core/
│ │ │ ├── CoreException.java
│ │ │ └── ims/
│ │ │ └── protocol/
│ │ │ └── rtp/
│ │ │ ├── CodecChain.java
│ │ │ ├── MediaRegistry.java
│ │ │ ├── MediaRtpReceiver.java
│ │ │ ├── Processor.java
│ │ │ ├── RtpException.java
│ │ │ ├── codec/
│ │ │ │ ├── Codec.java
│ │ │ │ └── video/
│ │ │ │ ├── VideoCodec.java
│ │ │ │ ├── h263/
│ │ │ │ │ ├── H263Config.java
│ │ │ │ │ ├── H263RtpHeader.java
│ │ │ │ │ ├── JavaDepacketizer.java
│ │ │ │ │ ├── JavaPacketizer.java
│ │ │ │ │ ├── decoder/
│ │ │ │ │ │ ├── NativeH263Decoder.java
│ │ │ │ │ │ └── VideoSample.java
│ │ │ │ │ └── encoder/
│ │ │ │ │ ├── NativeH263Encoder.java
│ │ │ │ │ └── NativeH263EncoderParams.java
│ │ │ │ └── h264/
│ │ │ │ ├── H264Config.java
│ │ │ │ ├── decoder/
│ │ │ │ │ └── NativeH264Decoder.java
│ │ │ │ └── encoder/
│ │ │ │ └── NativeH264Encoder.java
│ │ │ ├── core/
│ │ │ │ ├── RtcpAppPacket.java
│ │ │ │ ├── RtcpByePacket.java
│ │ │ │ ├── RtcpCompoundPacket.java
│ │ │ │ ├── RtcpPacket.java
│ │ │ │ ├── RtcpPacketReceiver.java
│ │ │ │ ├── RtcpPacketTransmitter.java
│ │ │ │ ├── RtcpPacketUtils.java
│ │ │ │ ├── RtcpReceiverReportPacket.java
│ │ │ │ ├── RtcpReport.java
│ │ │ │ ├── RtcpSdesBlock.java
│ │ │ │ ├── RtcpSdesItem.java
│ │ │ │ ├── RtcpSdesPacket.java
│ │ │ │ ├── RtcpSenderReportPacket.java
│ │ │ │ ├── RtcpSession.java
│ │ │ │ ├── RtcpStatisticsReceiver.java
│ │ │ │ ├── RtcpStatisticsTransmitter.java
│ │ │ │ ├── RtpPacket.java
│ │ │ │ ├── RtpPacketReceiver.java
│ │ │ │ ├── RtpPacketTransmitter.java
│ │ │ │ ├── RtpSource.java
│ │ │ │ ├── RtpStatisticsReceiver.java
│ │ │ │ └── RtpStatisticsTransmitter.java
│ │ │ ├── event/
│ │ │ │ ├── RtcpApplicationEvent.java
│ │ │ │ ├── RtcpByeEvent.java
│ │ │ │ ├── RtcpEvent.java
│ │ │ │ ├── RtcpEventListener.java
│ │ │ │ ├── RtcpReceiverReportEvent.java
│ │ │ │ ├── RtcpSdesEvent.java
│ │ │ │ └── RtcpSenderReportEvent.java
│ │ │ ├── format/
│ │ │ │ ├── DummyFormat.java
│ │ │ │ ├── Format.java
│ │ │ │ ├── audio/
│ │ │ │ │ ├── AudioFormat.java
│ │ │ │ │ └── PcmuAudioFormat.java
│ │ │ │ └── video/
│ │ │ │ ├── H263VideoFormat.java
│ │ │ │ ├── H264VideoFormat.java
│ │ │ │ └── VideoFormat.java
│ │ │ ├── media/
│ │ │ │ ├── MediaException.java
│ │ │ │ ├── MediaInput.java
│ │ │ │ ├── MediaOutput.java
│ │ │ │ └── MediaSample.java
│ │ │ ├── stream/
│ │ │ │ ├── DummyPacketSourceStream.java
│ │ │ │ ├── MediaCaptureStream.java
│ │ │ │ ├── MediaRendererStream.java
│ │ │ │ ├── ProcessorInputStream.java
│ │ │ │ ├── ProcessorOutputStream.java
│ │ │ │ └── RtpInputStream.java
│ │ │ └── util/
│ │ │ ├── Buffer.java
│ │ │ ├── Packet.java
│ │ │ └── SystemTimeBase.java
│ │ ├── platform/
│ │ │ ├── AndroidFactory.java
│ │ │ ├── FactoryException.java
│ │ │ ├── file/
│ │ │ │ ├── FileDescription.java
│ │ │ │ └── FileFactory.java
│ │ │ ├── logger/
│ │ │ │ └── AndroidAppender.java
│ │ │ ├── network/
│ │ │ │ ├── AndroidDatagramConnection.java
│ │ │ │ ├── AndroidHttpConnection.java
│ │ │ │ ├── AndroidNetworkFactory.java
│ │ │ │ ├── AndroidSocketConnection.java
│ │ │ │ ├── AndroidSocketServerConnection.java
│ │ │ │ ├── DatagramConnection.java
│ │ │ │ ├── HttpConnection.java
│ │ │ │ ├── NetworkFactory.java
│ │ │ │ ├── SocketConnection.java
│ │ │ │ └── SocketServerConnection.java
│ │ │ └── registry/
│ │ │ ├── AndroidRegistryFactory.java
│ │ │ └── RegistryFactory.java
│ │ ├── provider/
│ │ │ └── settings/
│ │ │ ├── RcsSettings.java
│ │ │ └── RcsSettingsData.java
│ │ ├── service/
│ │ │ └── api/
│ │ │ └── client/
│ │ │ ├── capability/
│ │ │ │ └── Capabilities.java
│ │ │ └── media/
│ │ │ ├── IMediaEventListener.aidl
│ │ │ ├── IMediaPlayer.aidl
│ │ │ ├── IMediaRenderer.aidl
│ │ │ ├── MediaCodec.aidl
│ │ │ ├── MediaCodec.java
│ │ │ └── video/
│ │ │ ├── VideoCodec.java
│ │ │ └── VideoSurfaceView.java
│ │ └── utils/
│ │ ├── FifoBuffer.java
│ │ ├── NetworkRessourceManager.java
│ │ └── logger/
│ │ ├── Appender.java
│ │ └── Logger.java
│ └── de/
│ └── kp/
│ ├── net/
│ │ ├── rtp/
│ │ │ ├── RtpPacket.java
│ │ │ ├── RtpRandom.java
│ │ │ ├── RtpSender.java
│ │ │ ├── RtpSocket.java
│ │ │ ├── packetizer/
│ │ │ │ ├── AbstractPacketizer.java
│ │ │ │ ├── H263Packetizer.java
│ │ │ │ ├── H264Fifo.java
│ │ │ │ └── H264Packetizer.java
│ │ │ ├── recorder/
│ │ │ │ ├── MediaRtpSender.java
│ │ │ │ └── RtspVideoRecorder.java
│ │ │ ├── stream/
│ │ │ │ └── RtpOutputStream.java
│ │ │ └── viewer/
│ │ │ └── RtpVideoRenderer.java
│ │ └── rtsp/
│ │ ├── RtspConstants.java
│ │ ├── client/
│ │ │ ├── RtspClient.java
│ │ │ ├── RtspControl.java
│ │ │ ├── api/
│ │ │ │ ├── EntityMessage.java
│ │ │ │ ├── Message.java
│ │ │ │ ├── MessageFactory.java
│ │ │ │ ├── Request.java
│ │ │ │ ├── RequestListener.java
│ │ │ │ ├── Response.java
│ │ │ │ ├── Transport.java
│ │ │ │ └── TransportListener.java
│ │ │ ├── header/
│ │ │ │ ├── CSeqHeader.java
│ │ │ │ ├── ContentEncodingHeader.java
│ │ │ │ ├── ContentLengthHeader.java
│ │ │ │ ├── ContentTypeHeader.java
│ │ │ │ ├── RtspBaseIntegerHeader.java
│ │ │ │ ├── RtspBaseStringHeader.java
│ │ │ │ ├── RtspContent.java
│ │ │ │ ├── RtspHeader.java
│ │ │ │ ├── SessionHeader.java
│ │ │ │ └── TransportHeader.java
│ │ │ ├── message/
│ │ │ │ ├── MessageBuffer.java
│ │ │ │ ├── RtspDescriptor.java
│ │ │ │ ├── RtspEntityMessage.java
│ │ │ │ ├── RtspMedia.java
│ │ │ │ ├── RtspMessage.java
│ │ │ │ └── RtspMessageFactory.java
│ │ │ ├── request/
│ │ │ │ ├── RtspDescribeRequest.java
│ │ │ │ ├── RtspOptionsRequest.java
│ │ │ │ ├── RtspPauseRequest.java
│ │ │ │ ├── RtspPlayRequest.java
│ │ │ │ ├── RtspRequest.java
│ │ │ │ ├── RtspSetupRequest.java
│ │ │ │ └── RtspTeardownRequest.java
│ │ │ ├── response/
│ │ │ │ └── RtspResponse.java
│ │ │ └── transport/
│ │ │ ├── TCPTransport.java
│ │ │ └── TCPTransportListener.java
│ │ └── server/
│ │ ├── RtspServer.java
│ │ └── response/
│ │ ├── Parser.java
│ │ ├── RtspAnnounceResponse.java
│ │ ├── RtspDescribeResponse.java
│ │ ├── RtspError.java
│ │ ├── RtspOptionsResponse.java
│ │ ├── RtspPauseResponse.java
│ │ ├── RtspPlayResponse.java
│ │ ├── RtspResponse.java
│ │ ├── RtspResponseTeardown.java
│ │ ├── RtspSetupResponse.java
│ │ └── SDP.java
│ └── rtspcamera/
│ ├── MediaConstants.java
│ ├── RtspApiCodecsCamera.java
│ └── RtspNativeCodecsCamera.java
└── RtspViewer/
├── .classpath
├── .gitignore
├── .project
├── AndroidManifest.xml
├── gpl.txt
├── proguard-project.txt
├── project.properties
├── res/
│ ├── layout/
│ │ └── videoview.xml
│ └── values/
│ └── strings.xml
└── src/
└── de/
└── kp/
└── rtspviewer/
└── RtspViewerActivity.java
Showing preview only (226K chars total). Download the full file or copy to clipboard to get everything.
SYMBOL INDEX (2439 symbols across 299 files)
FILE: RtspCamera/jni/avc_h264/common/include/avcapi_common.h
type AVCStatus (line 34) | typedef enum
type AVCProfile (line 48) | typedef enum
type AVCLevel (line 64) | typedef enum
type AVCNalUnitType (line 90) | typedef enum
type AVCLogType (line 111) | typedef enum
type AVCFlag (line 122) | typedef enum
type AVCFrameIO (line 132) | typedef struct tagAVCFrameIO
type AVCHandle (line 237) | typedef struct tagAVCHandle
FILE: RtspCamera/jni/avc_h264/common/include/avcint_common.h
type AVCPictureType (line 99) | typedef enum
type AVCSliceType (line 108) | typedef enum
type AVCMBMode (line 126) | typedef enum
type AVCSubMBMode (line 148) | typedef enum
type AVCPredMode (line 164) | typedef enum
type AVCIntra4x4PredMode (line 177) | typedef enum
type AVCIntra16x16PredMode (line 194) | typedef enum
type AVCIntraChromaPredMode (line 207) | typedef enum
type AVCResidualType (line 219) | typedef enum
type AVCHRDParams (line 234) | typedef struct tagHRDParams
type AVCVUIParams (line 253) | typedef struct tagVUIParam
type AVCSeqParamSet (line 298) | typedef struct tagSeqParamSet
type AVCPicParamSet (line 347) | typedef struct tagPicParamSet
type AVCSliceHeader (line 388) | typedef struct tagSliceHeader
type AVCPixelPos (line 462) | typedef struct tagPixPos
type AVCNeighborAvailability (line 472) | typedef struct tagNeighborAvailability
type AVCPictureData (line 485) | typedef struct tagPictureData
type AVCFrameStore (line 517) | typedef struct tagFrameStore
type AVCDecPicBuffer (line 555) | typedef struct tagDecPicBuffer
type AVCMacroblock (line 570) | typedef struct tagMacroblock
type AVCCommonObj (line 614) | typedef struct tagCommonObj
FILE: RtspCamera/jni/avc_h264/common/src/deblock.cpp
function OSCL_EXPORT_REF (line 73) | OSCL_EXPORT_REF AVCStatus DeblockPicture(AVCCommonObj *video)
function MBInLoopDeblock (line 110) | void MBInLoopDeblock(AVCCommonObj *video)
function DeblockMb (line 288) | void DeblockMb(AVCCommonObj *video, int mb_x, int mb_y, uint8 *SrcY, uin...
function GetStrength_Edge0 (line 528) | void GetStrength_Edge0(uint8 *Strength, AVCMacroblock* MbP, AVCMacrobloc...
function GetStrength_VerticalEdges (line 731) | void GetStrength_VerticalEdges(uint8 *Strength, AVCMacroblock* MbQ)
function GetStrength_HorizontalEdges (line 906) | void GetStrength_HorizontalEdges(uint8 Strength[12], AVCMacroblock* MbQ)
function EdgeLoop_Luma_horizontal (line 1084) | void EdgeLoop_Luma_horizontal(uint8* SrcPtr, uint8 *Strength, int Alpha,...
function EdgeLoop_Luma_vertical (line 1273) | void EdgeLoop_Luma_vertical(uint8* SrcPtr, uint8 *Strength, int Alpha, i...
function EdgeLoop_Chroma_vertical (line 1490) | void EdgeLoop_Chroma_vertical(uint8* SrcPtr, uint8 *Strength, int Alpha,...
function EdgeLoop_Chroma_horizontal (line 1580) | void EdgeLoop_Chroma_horizontal(uint8* SrcPtr, uint8 *Strength, int Alph...
FILE: RtspCamera/jni/avc_h264/common/src/dpb.cpp
function AVCStatus (line 27) | AVCStatus InitDPB(AVCHandle *avcHandle, AVCCommonObj *video, int FrameHe...
function OSCL_EXPORT_REF (line 117) | OSCL_EXPORT_REF AVCStatus AVCConfigureSequence(AVCHandle *avcHandle, AVC...
function OSCL_EXPORT_REF (line 220) | OSCL_EXPORT_REF AVCStatus CleanUpDPB(AVCHandle *avcHandle, AVCCommonObj ...
function OSCL_EXPORT_REF (line 247) | OSCL_EXPORT_REF AVCStatus DPBInitBuffer(AVCHandle *avcHandle, AVCCommonO...
function OSCL_EXPORT_REF (line 290) | OSCL_EXPORT_REF void DPBInitPic(AVCCommonObj *video, int CurrPicNum)
function OSCL_EXPORT_REF (line 340) | OSCL_EXPORT_REF void DPBReleaseCurrentFrame(AVCHandle *avcHandle, AVCCom...
function OSCL_EXPORT_REF (line 365) | OSCL_EXPORT_REF AVCStatus StorePictureInDPB(AVCHandle *avcHandle, AVCCom...
function AVCStatus (line 482) | AVCStatus sliding_window_process(AVCHandle *avcHandle, AVCCommonObj *vid...
function AVCStatus (line 555) | AVCStatus adaptive_memory_marking(AVCHandle *avcHandle, AVCCommonObj *vi...
function MemMgrCtrlOp1 (line 599) | void MemMgrCtrlOp1(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicB...
function MemMgrCtrlOp2 (line 621) | void MemMgrCtrlOp2(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, int long_...
function MemMgrCtrlOp3 (line 638) | void MemMgrCtrlOp3(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicB...
function MemMgrCtrlOp4 (line 672) | void MemMgrCtrlOp4(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicB...
function MemMgrCtrlOp5 (line 693) | void MemMgrCtrlOp5(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicB...
function MemMgrCtrlOp6 (line 710) | void MemMgrCtrlOp6(AVCHandle *avcHandle, AVCCommonObj *video, AVCDecPicB...
function unmark_for_reference (line 723) | void unmark_for_reference(AVCHandle *avcHandle, AVCDecPicBuffer *dpb, ui...
function unmark_long_term_frame_for_reference_by_frame_idx (line 742) | void unmark_long_term_frame_for_reference_by_frame_idx(AVCHandle *avcHan...
FILE: RtspCamera/jni/avc_h264/common/src/fmo.cpp
function OSCL_EXPORT_REF (line 22) | OSCL_EXPORT_REF AVCStatus FMOInit(AVCCommonObj *video)
function FmoGenerateType0MapUnitMap (line 67) | void FmoGenerateType0MapUnitMap(int *mapUnitToSliceGroupMap, uint *run_l...
function FmoGenerateType1MapUnitMap (line 85) | void FmoGenerateType1MapUnitMap(int *mapUnitToSliceGroupMap, int PicWidt...
function FmoGenerateType2MapUnitMap (line 96) | void FmoGenerateType2MapUnitMap(AVCPicParamSet *pps, int *mapUnitToSlice...
function FmoGenerateType3MapUnitMap (line 127) | void FmoGenerateType3MapUnitMap(AVCCommonObj *video, AVCPicParamSet* pps...
function FmoGenerateType4MapUnitMap (line 198) | void FmoGenerateType4MapUnitMap(int *mapUnitToSliceGroupMap, int MapUnit...
function FmoGenerateType5MapUnitMap (line 213) | void FmoGenerateType5MapUnitMap(int *mapUnitToSliceGroupMap, AVCCommonOb...
function FmoGenerateType6MapUnitMap (line 239) | void FmoGenerateType6MapUnitMap(int *mapUnitToSliceGroupMap, int *slice_...
FILE: RtspCamera/jni/avc_h264/common/src/mb_access.cpp
function OSCL_EXPORT_REF (line 21) | OSCL_EXPORT_REF void InitNeighborAvailability(AVCCommonObj *video, int m...
function mb_is_available (line 52) | bool mb_is_available(AVCMacroblock *mblock, uint PicSizeInMbs, int mbAdd...
function OSCL_EXPORT_REF (line 67) | OSCL_EXPORT_REF int predict_nnz(AVCCommonObj *video, int i, int j)
function OSCL_EXPORT_REF (line 127) | OSCL_EXPORT_REF int predict_nnz_chroma(AVCCommonObj *video, int i, int j)
function OSCL_EXPORT_REF (line 185) | OSCL_EXPORT_REF void GetMotionVectorPredictor(AVCCommonObj *video, int e...
FILE: RtspCamera/jni/avc_h264/common/src/reflist.cpp
function OSCL_EXPORT_REF (line 25) | OSCL_EXPORT_REF void RefListInit(AVCCommonObj *video)
function OSCL_EXPORT_REF (line 145) | OSCL_EXPORT_REF AVCStatus ReOrderList(AVCCommonObj *video)
function AVCStatus (line 169) | AVCStatus ReorderRefPicList(AVCCommonObj *video, int isL1)
function AVCStatus (line 262) | AVCStatus ReorderShortTerm(AVCCommonObj *video, int picNumLX, int *refId...
function AVCStatus (line 315) | AVCStatus ReorderLongTerm(AVCCommonObj *video, int LongTermPicNum, int *...
function AVCPictureData (line 362) | AVCPictureData* GetShortTermPic(AVCCommonObj *video, int picNum)
function AVCPictureData (line 383) | AVCPictureData* GetLongTermPic(AVCCommonObj *video, int LongtermPicNum)
function is_short_ref (line 403) | int is_short_ref(AVCPictureData *s)
function is_long_ref (line 408) | int is_long_ref(AVCPictureData *s)
function SortPicByPicNum (line 415) | void SortPicByPicNum(AVCPictureData *data[], int num)
function SortPicByPicNumLongTerm (line 437) | void SortPicByPicNumLongTerm(AVCPictureData *data[], int num)
function SortFrameByFrameNumWrap (line 460) | void SortFrameByFrameNumWrap(AVCFrameStore *data[], int num)
function SortFrameByLTFrameIdx (line 482) | void SortFrameByLTFrameIdx(AVCFrameStore *data[], int num)
function SortPicByPOC (line 504) | void SortPicByPOC(AVCPictureData *data[], int num, int descending)
function SortPicByLTPicNum (line 543) | void SortPicByLTPicNum(AVCPictureData *data[], int num)
function SortFrameByPOC (line 565) | void SortFrameByPOC(AVCFrameStore *data[], int num, int descending)
FILE: RtspCamera/jni/avc_h264/dec/include/avcdec_api.h
type AVCDec_Status (line 36) | typedef enum
type AVCDecSPSInfo (line 68) | typedef struct tagAVCDecSPSInfo
FILE: RtspCamera/jni/avc_h264/dec/include/pvavcdecoder.h
function class (line 30) | class PVAVCDecoder : public PVAVCDecoderInterface
FILE: RtspCamera/jni/avc_h264/dec/include/pvavcdecoder_factory.h
function class (line 31) | class PVAVCDecoderFactory
FILE: RtspCamera/jni/avc_h264/dec/include/pvavcdecoderinterface.h
function class (line 38) | class PVAVCDecoderInterface
FILE: RtspCamera/jni/avc_h264/dec/src/3GPVideoParser.cpp
function uint32 (line 42) | uint32 EndienConvert (uint32 input){
function uint32 (line 49) | uint32 getUint32FromUint8Buffer (uint8* buffer,uint32 offset){
function int32 (line 56) | int32 findAtom (uint8* buffer,uint32 bufferSize, uint32 valueToFind){
function int32 (line 71) | int32 findAtom (uint32* buffer,uint32 bufferSize, uint32 valueToFind){
function cleanupParser (line 86) | int cleanupParser(void){
function Init3GPVideoParser (line 104) | int Init3GPVideoParser (char *filePath){
function getFrame (line 304) | int getFrame (uint8* aOutBuffer,uint32* aBufferSize, uint32* aTimestamp){
function release (line 335) | int release(){
function uint32 (line 347) | uint32 getVideoDuration (){
function uint32 (line 367) | uint32 getVideoWidth (){
function uint32 (line 376) | uint32 getVideoHeight(){
FILE: RtspCamera/jni/avc_h264/dec/src/3GPVideoParser.h
type uint8 (line 13) | typedef unsigned char uint8;
type uint16 (line 14) | typedef unsigned short uint16;
type int16 (line 15) | typedef short int16;
type uint32 (line 16) | typedef unsigned long uint32;
type int32 (line 17) | typedef long int32;
type Atom (line 39) | typedef struct {
type sample (line 44) | struct sample {
type Sample (line 50) | typedef struct sample Sample;
FILE: RtspCamera/jni/avc_h264/dec/src/NativeH264Decoder.cpp
function JNIEXPORT (line 51) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 65) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 78) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 199) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 211) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 222) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 234) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 245) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 256) | JNIEXPORT jstring JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_...
function JNIEXPORT (line 269) | JNIEXPORT jobject JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_...
function jint (line 280) | jint JNI_OnLoad(JavaVM* vm, void* reserved) {
FILE: RtspCamera/jni/avc_h264/dec/src/avc_bitstream.cpp
function AVCDec_Status (line 67) | AVCDec_Status BitstreamInit(AVCDecBitstream *stream, uint8 *buffer, int ...
function AVCDec_Status (line 94) | AVCDec_Status AVC_BitstreamFillCache(AVCDecBitstream *stream)
function AVCDec_Status (line 170) | AVCDec_Status BitstreamReadBits(AVCDecBitstream *stream, int nBits, uint...
function AVCDec_Status (line 194) | AVCDec_Status BitstreamShowBits(AVCDecBitstream *stream, int nBits, uint...
function AVCDec_Status (line 218) | AVCDec_Status BitstreamRead1Bit(AVCDecBitstream *stream, uint *code)
function AVCDec_Status (line 232) | AVCDec_Status BitstreamByteAlign(AVCDecBitstream *stream)
function more_rbsp_data (line 252) | bool more_rbsp_data(AVCDecBitstream *stream)
FILE: RtspCamera/jni/avc_h264/dec/src/avcdec_api.cpp
function AVCDec_Status (line 58) | AVCDec_Status EBSPtoRBSP(uint8 *nal_unit, int *size)
function OSCL_EXPORT_REF (line 105) | OSCL_EXPORT_REF AVCDec_Status PVAVCAnnexBGetNALUnit(uint8 *bitstream, ui...
function OSCL_EXPORT_REF (line 170) | OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetNALType(uint8 *bitstream, int s...
function OSCL_EXPORT_REF (line 196) | OSCL_EXPORT_REF AVCDec_Status PVAVCDecSeqParamSet(AVCHandle *avcHandle...
function OSCL_EXPORT_REF (line 366) | OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetSeqInfo(AVCHandle *avcHandle, A...
function OSCL_EXPORT_REF (line 437) | OSCL_EXPORT_REF AVCDec_Status PVAVCDecPicParamSet(AVCHandle *avcHandle...
function OSCL_EXPORT_REF (line 490) | OSCL_EXPORT_REF AVCDec_Status PVAVCDecSEI(AVCHandle *avcHandle, uint8 ...
function OSCL_EXPORT_REF (line 507) | OSCL_EXPORT_REF AVCDec_Status PVAVCDecodeSlice(AVCHandle *avcHandle, uin...
function OSCL_EXPORT_REF (line 857) | OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetOutput(AVCHandle *avcHandle, in...
function OSCL_EXPORT_REF (line 1044) | OSCL_EXPORT_REF void PVAVCDecReset(AVCHandle *avcHandle)
function OSCL_EXPORT_REF (line 1112) | OSCL_EXPORT_REF void PVAVCCleanUpDecoder(AVCHandle *avcHandle)
FILE: RtspCamera/jni/avc_h264/dec/src/avcdec_int.h
type AVCDecBitstream (line 40) | typedef struct tagDecBitstream
type AVCDecObject (line 60) | typedef struct tagDecObject
FILE: RtspCamera/jni/avc_h264/dec/src/header.cpp
function AVCDec_Status (line 24) | AVCDec_Status DecodeSPS(AVCDecObject *decvid, AVCDecBitstream *stream)
function AVCDec_Status (line 183) | AVCDec_Status vui_parameters(AVCDecObject *decvid, AVCDecBitstream *stre...
function AVCDec_Status (line 291) | AVCDec_Status hrd_parameters(AVCDecObject *decvid, AVCDecBitstream *stre...
function AVCDec_Status (line 329) | AVCDec_Status DecodePPS(AVCDecObject *decvid, AVCCommonObj *video, AVCDe...
function AVCDec_Status (line 549) | AVCDec_Status DecodeSliceHeader(AVCDecObject *decvid, AVCCommonObj *vide...
function AVCDec_Status (line 808) | AVCDec_Status fill_frame_num_gap(AVCHandle *avcHandle, AVCCommonObj *video)
function AVCDec_Status (line 865) | AVCDec_Status ref_pic_list_reordering(AVCCommonObj *video, AVCDecBitstre...
function AVCDec_Status (line 912) | AVCDec_Status dec_ref_pic_marking(AVCCommonObj *video, AVCDecBitstream *...
function AVCDec_Status (line 970) | AVCDec_Status DecodePOC(AVCCommonObj *video)
function AVCDec_Status (line 1111) | AVCDec_Status DecodeSEI(AVCDecObject *decvid, AVCDecBitstream *stream)
function AVCDec_Status (line 1118) | AVCDec_Status sei_payload(AVCDecObject *decvid, AVCDecBitstream *stream,...
function AVCDec_Status (line 1189) | AVCDec_Status buffering_period(AVCDecObject *decvid, AVCDecBitstream *st...
function AVCDec_Status (line 1228) | AVCDec_Status pic_timing(AVCDecObject *decvid, AVCDecBitstream *stream)
function AVCDec_Status (line 1351) | AVCDec_Status recovery_point(AVCDecObject *decvid, AVCDecBitstream *stream)
function AVCDec_Status (line 1365) | AVCDec_Status dec_ref_pic_marking_repetition(AVCDecObject *decvid, AVCDe...
function AVCDec_Status (line 1390) | AVCDec_Status motion_constrained_slice_group_set(AVCDecObject *decvid, A...
FILE: RtspCamera/jni/avc_h264/dec/src/itrans.cpp
function Intra16DCTrans (line 24) | void Intra16DCTrans(int16 *block, int Qq, int Rq)
function itrans (line 90) | void itrans(int16 *block, uint8 *pred, uint8 *cur, int width)
function ictrans (line 170) | void ictrans(int16 *block, uint8 *pred, uint8 *cur, int width)
function ChromaDCTrans (line 250) | void ChromaDCTrans(int16 *block, int Qq, int Rq)
function copy_block (line 286) | void copy_block(uint8 *pred, uint8 *cur, int width, int pred_pitch)
FILE: RtspCamera/jni/avc_h264/dec/src/pred_inter.cpp
function InterMBPrediction (line 38) | void InterMBPrediction(AVCCommonObj *video)
function LumaMotionComp (line 240) | void LumaMotionComp(uint8 *ref, int picwidth, int picheight,
function CreateAlign (line 366) | void CreateAlign(uint8 *ref, int picwidth, int y_pos,
function CreatePad (line 446) | void CreatePad(uint8 *ref, int picwidth, int picheight, int x_pos, int y...
function HorzInterp1MC (line 548) | void HorzInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
function HorzInterp2MC (line 838) | void HorzInterp2MC(int *in, int inpitch, uint8 *out, int outpitch,
function HorzInterp3MC (line 997) | void HorzInterp3MC(uint8 *in, int inpitch, int *out, int outpitch,
function VertInterp1MC (line 1062) | void VertInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
function VertInterp2MC (line 1365) | void VertInterp2MC(uint8 *in, int inpitch, int *out, int outpitch,
function VertInterp3MC (line 1431) | void VertInterp3MC(int *in, int inpitch, uint8 *out, int outpitch,
function DiagonalInterpMC (line 1590) | void DiagonalInterpMC(uint8 *in1, uint8 *in2, int inpitch,
function FullPelMC (line 1936) | void FullPelMC(uint8 *in, int inpitch, uint8 *out, int outpitch,
function ChromaMotionComp (line 1984) | void ChromaMotionComp(uint8 *ref, int picwidth, int picheight,
function ChromaDiagonalMC_SIMD (line 2020) | void ChromaDiagonalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function ChromaHorizontalMC_SIMD (line 2112) | void ChromaHorizontalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function ChromaVerticalMC_SIMD (line 2160) | void ChromaVerticalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function ChromaDiagonalMC2_SIMD (line 2202) | void ChromaDiagonalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function ChromaHorizontalMC2_SIMD (line 2244) | void ChromaHorizontalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function ChromaVerticalMC2_SIMD (line 2265) | void ChromaVerticalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function ChromaFullMC_SIMD (line 2290) | void ChromaFullMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
FILE: RtspCamera/jni/avc_h264/dec/src/pred_intra.cpp
function AVCStatus (line 27) | AVCStatus IntraMBPrediction(AVCCommonObj *video)
function SaveNeighborForIntraPred (line 365) | void SaveNeighborForIntraPred(AVCCommonObj *video, int offset)
function AVCStatus (line 509) | AVCStatus Intra_4x4(AVCCommonObj *video, int block_x, int block_y, uint8...
function Intra_4x4_Vertical (line 712) | void Intra_4x4_Vertical(AVCCommonObj *video, int block_offset)
function Intra_4x4_Horizontal (line 737) | void Intra_4x4_Horizontal(AVCCommonObj *video, int pitch, int block_offset)
function Intra_4x4_DC (line 771) | void Intra_4x4_DC(AVCCommonObj *video, int pitch, int block_offset,
function Intra_4x4_Down_Left (line 825) | void Intra_4x4_Down_Left(AVCCommonObj *video, int block_offset,
function Intra_4x4_Diagonal_Down_Right (line 901) | void Intra_4x4_Diagonal_Down_Right(AVCCommonObj *video, int pitch, int
function Intra_4x4_Diagonal_Vertical_Right (line 966) | void Intra_4x4_Diagonal_Vertical_Right(AVCCommonObj *video, int pitch...
function Intra_4x4_Diagonal_Horizontal_Down (line 1036) | void Intra_4x4_Diagonal_Horizontal_Down(AVCCommonObj *video, int pitch,
function Intra_4x4_Vertical_Left (line 1109) | void Intra_4x4_Vertical_Left(AVCCommonObj *video, int block_offset, AVCN...
function Intra_4x4_Horizontal_Up (line 1168) | void Intra_4x4_Horizontal_Up(AVCCommonObj *video, int pitch, int block_o...
function Intra_16x16_Vertical (line 1220) | void Intra_16x16_Vertical(AVCCommonObj *video)
function Intra_16x16_Horizontal (line 1254) | void Intra_16x16_Horizontal(AVCCommonObj *video, int pitch)
function Intra_16x16_DC (line 1277) | void Intra_16x16_DC(AVCCommonObj *video, int pitch)
function Intra_16x16_Plane (line 1357) | void Intra_16x16_Plane(AVCCommonObj *video, int pitch)
function Intra_Chroma_DC (line 1483) | void Intra_Chroma_DC(AVCCommonObj *video, int pitch, uint8 *predCb, uint...
function Intra_Chroma_Horizontal (line 1621) | void Intra_Chroma_Horizontal(AVCCommonObj *video, int pitch, uint8 *pre...
function Intra_Chroma_Vertical (line 1655) | void Intra_Chroma_Vertical(AVCCommonObj *video, uint8 *predCb, uint8 *p...
function Intra_Chroma_Plane (line 1683) | void Intra_Chroma_Plane(AVCCommonObj *video, int pitch, uint8 *predCb, ...
FILE: RtspCamera/jni/avc_h264/dec/src/pvavcdecoder.cpp
function CbAvcDecDebugLog (line 28) | void CbAvcDecDebugLog(uint32 *userData, AVCLogType type, char *string1, ...
function CbAvcDecMalloc (line 39) | int CbAvcDecMalloc(void *userData, int32 size, int attribute)
function CbAvcDecFree (line 51) | void CbAvcDecFree(void *userData, int mem)
function CbAvcDecDPBAlloc (line 60) | int CbAvcDecDPBAlloc(void *userData, uint frame_size_in_mbs, uint num_bu...
function CbAvcDecFrameUnbind (line 67) | void CbAvcDecFrameUnbind(void *userData, int indx)
function CbAvcDecFrameBind (line 76) | int CbAvcDecFrameBind(void *userData, int indx, uint8 **yuv)
function PVAVCDecoder (line 99) | PVAVCDecoder* PVAVCDecoder::New(void)
function int32 (line 150) | int32 PVAVCDecoder::DecodeSPS(uint8 *bitstream, int32 buffer_size)
function int32 (line 157) | int32 PVAVCDecoder::DecodePPS(uint8 *bitstream, int32 buffer_size)
function int32 (line 164) | int32 PVAVCDecoder::DecodeAVCSlice(uint8 *bitstream, int32 *buffer_size)
FILE: RtspCamera/jni/avc_h264/dec/src/pvavcdecoder_factory.cpp
function PVAVCDecoderInterface (line 38) | PVAVCDecoderInterface* PVAVCDecoderFactory::CreatePVAVCDecoder()
function OSCL_EXPORT_REF (line 50) | OSCL_EXPORT_REF bool PVAVCDecoderFactory::DeletePVAVCDecoder(PVAVCDecode...
FILE: RtspCamera/jni/avc_h264/dec/src/residual.cpp
function AVCDec_Status (line 22) | AVCDec_Status DecodeIntraPCM(AVCCommonObj *video, AVCDecBitstream *stream)
function AVCDec_Status (line 170) | AVCDec_Status residual(AVCDecObject *decvid, AVCMacroblock *currMB)
function AVCDec_Status (line 350) | AVCDec_Status residual_block_cavlc(AVCDecObject *decvid, int nC, int max...
FILE: RtspCamera/jni/avc_h264/dec/src/slice.cpp
function AVCDec_Status (line 28) | AVCDec_Status DecodeSlice(AVCDecObject *decvid)
function AVCDec_Status (line 115) | AVCDec_Status DecodeMB(AVCDecObject *decvid)
function AVCDec_Status (line 279) | AVCDec_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDec...
function AVCDec_Status (line 388) | AVCDec_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AV...
function InterpretMBModeI (line 447) | void InterpretMBModeI(AVCMacroblock *mblock, uint mb_type)
function InterpretMBModeP (line 478) | void InterpretMBModeP(AVCMacroblock *mblock, uint mb_type)
function InterpretMBModeB (line 509) | void InterpretMBModeB(AVCMacroblock *mblock, uint mb_type)
function InterpretMBModeSI (line 549) | void InterpretMBModeSI(AVCMacroblock *mblock, uint mb_type)
function InterpretSubMBModeP (line 566) | void InterpretSubMBModeP(AVCMacroblock *mblock, uint *sub_mb_type)
function InterpretSubMBModeB (line 587) | void InterpretSubMBModeB(AVCMacroblock *mblock, uint *sub_mb_type)
function AVCDec_Status (line 616) | AVCDec_Status DecodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *cur...
function AVCDec_Status (line 714) | AVCDec_Status ConcealSlice(AVCDecObject *decvid, int mbnum_start, int mb...
FILE: RtspCamera/jni/avc_h264/dec/src/vlc.cpp
type tagVLCNumCoeffTrail (line 33) | struct tagVLCNumCoeffTrail
type tagShiftOffset (line 40) | struct tagShiftOffset
function AVCDec_Status (line 97) | AVCDec_Status ue_v(AVCDecBitstream *bitstream, uint *codeNum)
function AVCDec_Status (line 122) | AVCDec_Status se_v(AVCDecBitstream *bitstream, int *value)
function AVCDec_Status (line 152) | AVCDec_Status se_v32bit(AVCDecBitstream *bitstream, int32 *value)
function AVCDec_Status (line 172) | AVCDec_Status te_v(AVCDecBitstream *bitstream, uint *value, uint range)
function AVCDec_Status (line 190) | AVCDec_Status GetEGBitstring32bit(AVCDecBitstream *bitstream, int *leadi...
function AVCDec_Status (line 241) | AVCDec_Status DecodeCBP(AVCMacroblock *currMB, AVCDecBitstream *stream)
function AVCDec_Status (line 272) | AVCDec_Status ce_TotalCoeffTrailingOnes(AVCDecBitstream *stream, int *Tr...
function AVCDec_Status (line 508) | AVCDec_Status ce_TotalCoeffTrailingOnesChromaDC(AVCDecBitstream *stream,...
function AVCDec_Status (line 546) | AVCDec_Status ce_LevelPrefix(AVCDecBitstream *stream, uint *code)
function AVCDec_Status (line 561) | AVCDec_Status ce_TotalZeros(AVCDecBitstream *stream, int *code, int Tota...
function AVCDec_Status (line 745) | AVCDec_Status ce_TotalZerosChromaDC(AVCDecBitstream *stream, int *code, ...
function AVCDec_Status (line 769) | AVCDec_Status ce_RunBefore(AVCDecBitstream *stream, int *code, int zeros...
FILE: RtspCamera/jni/avc_h264/dec/src/yuv2rgb.cpp
function convert (line 10) | void convert (int width,int height, uint8 *in,uint32 *out){
FILE: RtspCamera/jni/avc_h264/enc/include/pvavcencoder.h
function class (line 31) | class PVAVCEncoder : public PVAVCEncoderInterface
FILE: RtspCamera/jni/avc_h264/enc/include/pvavcencoder_factory.h
function class (line 31) | class PVAVCEncoderFactory
FILE: RtspCamera/jni/avc_h264/enc/include/pvavcencoderinterface.h
type TAVCEI_RETVAL (line 28) | enum TAVCEI_RETVAL
type TAVCEIVideoFormat (line 45) | enum TAVCEIVideoFormat
type TAVCEIEncodingMode (line 55) | enum TAVCEIEncodingMode
type TAVCEIRateControlType (line 71) | enum TAVCEIRateControlType
type TAVCEIProfile (line 84) | enum TAVCEIProfile
type TAVCEILevel (line 98) | enum TAVCEILevel
type TAVCEIOutputFormat (line 120) | enum TAVCEIOutputFormat
type TAVCEIEncodeParam (line 134) | struct TAVCEIEncodeParam
type TAVCEIInputFormat (line 242) | struct TAVCEIInputFormat
type TAVCEIInputData (line 262) | struct TAVCEIInputData
type TAVCEIOutputData (line 272) | struct TAVCEIOutputData
function class (line 304) | class PVAVCEncoderInterface
FILE: RtspCamera/jni/avc_h264/enc/src/NativeH264Encoder.cpp
function JNIEXPORT (line 37) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 140) | JNIEXPORT jbyteArray JNICALL Java_com_orangelabs_rcs_core_ims_protocol_r...
function JNIEXPORT (line 233) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 243) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function jint (line 257) | jint JNI_OnLoad(JavaVM* vm, void* reserved) {
FILE: RtspCamera/jni/avc_h264/enc/src/avcenc_api.cpp
function OSCL_EXPORT_REF (line 35) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncGetNALType(unsigned char *bitstrea...
function OSCL_EXPORT_REF (line 60) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncGetProfileLevel(AVCHandle* avcHand...
function OSCL_EXPORT_REF (line 87) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncInitialize(AVCHandle *avcHandle, A...
function OSCL_EXPORT_REF (line 322) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncGetMaxOutputBufferSize(AVCHandle *...
function OSCL_EXPORT_REF (line 344) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncSetInput(AVCHandle *avcHandle, AVC...
function OSCL_EXPORT_REF (line 459) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncodeNAL(AVCHandle *avcHandle, unsig...
function OSCL_EXPORT_REF (line 639) | OSCL_EXPORT_REF uint8* PVAVCEncGetOverrunBuffer(AVCHandle* avcHandle)
function OSCL_EXPORT_REF (line 668) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncGetRecon(AVCHandle *avcHandle, AVC...
function OSCL_EXPORT_REF (line 693) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncReleaseRecon(AVCHandle *avcHandle,...
function OSCL_EXPORT_REF (line 709) | OSCL_EXPORT_REF void PVAVCCleanUpEncoder(AVCHandle *avcHandle)
function OSCL_EXPORT_REF (line 811) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncUpdateBitRate(AVCHandle *avcHandle...
function OSCL_EXPORT_REF (line 868) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncUpdateFrameRate(AVCHandle *avcHand...
function OSCL_EXPORT_REF (line 926) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncUpdateIDRInterval(AVCHandle *avcHa...
function OSCL_EXPORT_REF (line 958) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncIDRRequest(AVCHandle *avcHandle)
function OSCL_EXPORT_REF (line 988) | OSCL_EXPORT_REF AVCEnc_Status PVAVCEncUpdateIMBRefresh(AVCHandle *avcHan...
function PVAVCEncGetFrameStats (line 1008) | void PVAVCEncGetFrameStats(AVCHandle *avcHandle, AVCEncFrameStats *avcSt...
FILE: RtspCamera/jni/avc_h264/enc/src/avcenc_api.h
type AVCEnc_Status (line 42) | typedef enum
type AVCEncParams (line 98) | typedef struct tagAVCEncParam
type AVCEncFrameStats (line 172) | typedef struct tagAVCEncFrameStats
FILE: RtspCamera/jni/avc_h264/enc/src/avcenc_int.h
type AVCEnc_State (line 112) | typedef enum
type AVCEncBitstream (line 129) | typedef struct tagEncBitstream
type RDInfo (line 151) | typedef struct tagRDInfo
type MultiPass (line 159) | typedef struct tagMultiPass
type dataPointArray (line 204) | typedef struct tagdataPointArray
type AVCRateControl (line 213) | typedef struct tagAVCRateControl
type AVCMV (line 322) | typedef struct tagMV
type AVCEncFuncPtr (line 332) | typedef struct tagAVCEncFuncPtr
type AVCPadInfo (line 343) | typedef struct tagPadInfo
type HTFM_Stat (line 353) | typedef struct tagHTFM_Stat
type AVCEncObject (line 368) | typedef struct tagEncObject
FILE: RtspCamera/jni/avc_h264/enc/src/bitstream_io.cpp
function AVCEnc_Status (line 46) | AVCEnc_Status BitstreamEncInit(AVCEncBitstream *stream, uint8 *buffer, i...
function AVCEnc_Status (line 83) | AVCEnc_Status AVCBitstreamSaveWord(AVCEncBitstream *stream)
function AVCEnc_Status (line 143) | AVCEnc_Status BitstreamWriteBits(AVCEncBitstream *stream, int nBits, uin...
function AVCEnc_Status (line 195) | AVCEnc_Status BitstreamWrite1Bit(AVCEncBitstream *stream, uint code)
function AVCEnc_Status (line 225) | AVCEnc_Status BitstreamTrailingBits(AVCEncBitstream *bitstream, uint *na...
function byte_aligned (line 253) | bool byte_aligned(AVCEncBitstream *stream)
function AVCEnc_Status (line 263) | AVCEnc_Status AVCBitstreamUseOverrunBuffer(AVCEncBitstream* stream, int ...
FILE: RtspCamera/jni/avc_h264/enc/src/block.cpp
function trans (line 22) | void trans(uint8 *cur, int pitch, uint8 *predBlock, int16 *dataBlock)
function dct_luma (line 78) | int dct_luma(AVCEncObject *encvid, int blkidx, uint8 *cur, uint8 *org, i...
function MBInterIdct (line 261) | void MBInterIdct(AVCCommonObj *video, uint8 *curL, AVCMacroblock *currMB...
function dct_luma_16x16 (line 361) | void dct_luma_16x16(AVCEncObject *encvid, uint8 *curL, uint8 *orgL)
function dct_chroma (line 723) | void dct_chroma(AVCEncObject *encvid, uint8 *curC, uint8 *orgC, int cr)
function TransQuantIntra16DC (line 1115) | int TransQuantIntra16DC(AVCEncObject *encvid)
function TransQuantChromaDC (line 1208) | int TransQuantChromaDC(AVCEncObject *encvid, int16 *block, int slice_typ...
FILE: RtspCamera/jni/avc_h264/enc/src/findhalfpel.cpp
function AVCFindHalfPelMB (line 55) | int AVCFindHalfPelMB(AVCEncObject *encvid, uint8 *cur, AVCMV *mot, uint8...
function GenerateHalfPelPred (line 147) | void GenerateHalfPelPred(uint8* subpel_pred, uint8 *ncand, int lx)
function VertInterpWClip (line 459) | void VertInterpWClip(uint8 *dst, uint8 *ref)
function GenerateQuartPelPred (line 519) | void GenerateQuartPelPred(uint8 **bilin_base, uint8 *qpel_cand, int hpel...
function SATD_MB (line 609) | int SATD_MB(uint8 *cand, uint8 *cur, int dmin)
FILE: RtspCamera/jni/avc_h264/enc/src/header.cpp
function AVCEnc_Status (line 25) | AVCEnc_Status EncodeSPS(AVCEncObject *encvid, AVCEncBitstream *stream)
function EncodeVUI (line 113) | void EncodeVUI(AVCEncBitstream* stream, AVCVUIParams* vui)
function EncodeHRD (line 200) | void EncodeHRD(AVCEncBitstream* stream, AVCHRDParams* hrd)
function AVCEnc_Status (line 227) | AVCEnc_Status EncodePPS(AVCEncObject *encvid, AVCEncBitstream *stream)
function AVCEnc_Status (line 316) | AVCEnc_Status EncodeSliceHeader(AVCEncObject *encvid, AVCEncBitstream *s...
function AVCEnc_Status (line 506) | AVCEnc_Status ref_pic_list_reordering(AVCCommonObj *video, AVCEncBitstre...
function AVCEnc_Status (line 587) | AVCEnc_Status dec_ref_pic_marking(AVCCommonObj *video, AVCEncBitstream *...
function AVCEnc_Status (line 648) | AVCEnc_Status InitPOC(AVCEncObject *encvid)
function AVCEnc_Status (line 881) | AVCEnc_Status PostPOC(AVCCommonObj *video)
FILE: RtspCamera/jni/avc_h264/enc/src/init.cpp
function AVCEnc_Status (line 26) | AVCEnc_Status SetEncodeParam(AVCHandle* avcHandle, AVCEncParams* encParam,
function AVCEnc_Status (line 570) | AVCEnc_Status VerifyProfile(AVCEncObject *encvid, AVCSeqParamSet *seqPar...
function AVCEnc_Status (line 640) | AVCEnc_Status VerifyLevel(AVCEncObject *encvid, AVCSeqParamSet *seqParam...
function AVCEnc_Status (line 704) | AVCEnc_Status InitFrame(AVCEncObject *encvid)
function AVCEnc_Status (line 803) | AVCEnc_Status InitSlice(AVCEncObject *encvid)
FILE: RtspCamera/jni/avc_h264/enc/src/intra_est.cpp
function IntraDecisionABE (line 33) | bool IntraDecisionABE(AVCEncObject *encvid, int min_cost, uint8 *curL, i...
function MBIntraSearch (line 112) | void MBIntraSearch(AVCEncObject *encvid, int mbnum, uint8 *curL, int pic...
function intrapred_luma_16x16 (line 207) | void intrapred_luma_16x16(AVCEncObject *encvid)
function find_cost_16x16 (line 432) | void find_cost_16x16(AVCEncObject *encvid, uint8 *orgY, int *min_cost)
function cost_i16 (line 493) | int cost_i16(uint8 *org, int org_pitch, uint8 *pred, int min_cost)
function mb_intra4x4_search (line 626) | void mb_intra4x4_search(AVCEncObject *encvid, int *min_cost)
function blk_intra4x4_search (line 721) | int blk_intra4x4_search(AVCEncObject *encvid, int blkidx, uint8 *cur, ui...
function FindMostProbableI4Mode (line 1224) | int FindMostProbableI4Mode(AVCCommonObj *video, int blkidx)
function cost_i4 (line 1290) | void cost_i4(uint8 *org, int org_pitch, uint8 *pred, uint16 *cost)
function chroma_intra_search (line 1364) | void chroma_intra_search(AVCEncObject *encvid)
function SATDChroma (line 1726) | int SATDChroma(uint8 *orgCb, uint8 *orgCr, int org_pitch, uint8 *pred, i...
function MBIntraSearch (line 1852) | void MBIntraSearch(AVCEncObject *encvid, AVCMacroblock *currMB, int mbNum)
function MBIntraSearch (line 2005) | void MBIntraSearch(AVCEncObject *encvid, AVCMacroblock *currMB, int mbNum)
FILE: RtspCamera/jni/avc_h264/enc/src/motion_comp.cpp
function AVCMBMotionComp (line 39) | void AVCMBMotionComp(AVCEncObject *encvid, AVCCommonObj *video)
function eLumaMotionComp (line 122) | void eLumaMotionComp(uint8 *ref, int picpitch, int picheight,
function eCreateAlign (line 191) | void eCreateAlign(uint8 *ref, int picpitch, int y_pos,
function eHorzInterp1MC (line 269) | void eHorzInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
function eHorzInterp2MC (line 559) | void eHorzInterp2MC(int *in, int inpitch, uint8 *out, int outpitch,
function eHorzInterp3MC (line 718) | void eHorzInterp3MC(uint8 *in, int inpitch, int *out, int outpitch,
function eVertInterp1MC (line 783) | void eVertInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
function eVertInterp2MC (line 1086) | void eVertInterp2MC(uint8 *in, int inpitch, int *out, int outpitch,
function eVertInterp3MC (line 1152) | void eVertInterp3MC(int *in, int inpitch, uint8 *out, int outpitch,
function eDiagonalInterpMC (line 1311) | void eDiagonalInterpMC(uint8 *in1, uint8 *in2, int inpitch,
function eFullPelMC (line 1657) | void eFullPelMC(uint8 *in, int inpitch, uint8 *out, int outpitch,
function ePadChroma (line 1705) | void ePadChroma(uint8 *ref, int picwidth, int picheight, int picpitch, i...
function eChromaMotionComp (line 1813) | void eChromaMotionComp(uint8 *ref, int picwidth, int picheight,
function eChromaDiagonalMC_SIMD (line 1841) | void eChromaDiagonalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function eChromaHorizontalMC_SIMD (line 1933) | void eChromaHorizontalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function eChromaVerticalMC_SIMD (line 1982) | void eChromaVerticalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function eChromaDiagonalMC2_SIMD (line 2025) | void eChromaDiagonalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function eChromaHorizontalMC2_SIMD (line 2068) | void eChromaHorizontalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function eChromaVerticalMC2_SIMD (line 2090) | void eChromaVerticalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
function eChromaFullMC_SIMD (line 2116) | void eChromaFullMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
FILE: RtspCamera/jni/avc_h264/enc/src/motion_est.cpp
function AVCEnc_Status (line 69) | AVCEnc_Status InitMotionSearchModule(AVCHandle *avcHandle)
function CleanMotionSearchModule (line 174) | void CleanMotionSearchModule(AVCHandle *avcHandle)
function IntraDecisionABE (line 188) | bool IntraDecisionABE(int *min_cost, uint8 *cur, int pitch, bool ave)
function AVCMotionEstimation (line 238) | void AVCMotionEstimation(AVCEncObject *encvid)
function AVCPaddingEdge (line 487) | void AVCPaddingEdge(AVCPictureData *refPic)
function AVCRasterIntraUpdate (line 577) | void AVCRasterIntraUpdate(AVCEncObject *encvid, AVCMacroblock *mblock, i...
function InitHTFM (line 607) | void InitHTFM(VideoEncData *encvid, HTFM_Stat *htfm_stat, double *newvar...
function UpdateHTFM (line 693) | void UpdateHTFM(AVCEncObject *encvid, double *newvar, double *exp_lamda,...
function CalcThreshold (line 725) | void CalcThreshold(double pf, double exp_lamda[], int nrmlz_th[])
function HTFMPrepareCurMB_AVC (line 748) | void HTFMPrepareCurMB_AVC(AVCEncObject *encvid, HTFM_Stat *htfm_stat,...
function AVCPrepareCurMB (line 812) | void AVCPrepareCurMB(AVCEncObject *encvid, uint8 *cur, int pitch)
function AVCEnc_Status (line 844) | AVCEnc_Status AVCMBMotionSearch(AVCEncObject *encvid, AVCMacroblock *cur...
function AVCMBMotionSearch (line 961) | void AVCMBMotionSearch(AVCEncObject *encvid, uint8 *cur, uint8 *best_can...
function AVCFullSearch (line 1266) | int AVCFullSearch(AVCEncObject *encvid, uint8 *prev, uint8 *cur,
function AVCCandidateSelection (line 1362) | void AVCCandidateSelection(int *mvx, int *mvy, int *num_can, int imb, in...
function AVCMoveNeighborSAD (line 1686) | void AVCMoveNeighborSAD(int dn[], int new_loc)
function AVCFindMin (line 1760) | int AVCFindMin(int dn[])
FILE: RtspCamera/jni/avc_h264/enc/src/pvavcencoder.cpp
function CbAvcEncDebugLog (line 28) | void CbAvcEncDebugLog(uint32 *userData, AVCLogType type, char *string1, ...
function CbAvcEncMalloc (line 39) | int CbAvcEncMalloc(void *userData, int32 size, int attribute)
function CbAvcEncFree (line 51) | void CbAvcEncFree(void *userData, int mem)
function CbAvcEncDPBAlloc (line 60) | int CbAvcEncDPBAlloc(void *userData, uint frame_size_in_mbs, uint num_bu...
function CbAvcEncFrameUnbind (line 67) | void CbAvcEncFrameUnbind(void *userData, int indx)
function CbAvcEncFrameBind (line 76) | int CbAvcEncFrameBind(void *userData, int indx, uint8 **yuv)
function OSCL_EXPORT_REF (line 93) | OSCL_EXPORT_REF PVAVCEncoder::~PVAVCEncoder()
function OSCL_EXPORT_REF (line 99) | OSCL_EXPORT_REF PVAVCEncoder* PVAVCEncoder::New()
function OSCL_EXPORT_REF (line 132) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::Initialize(TAVCEIInputFormat...
function int32 (line 164) | int32 PVAVCEncoder::GetMaxOutputBufferSize()
function TAVCEI_RETVAL (line 174) | TAVCEI_RETVAL PVAVCEncoder::Init(TAVCEIInputFormat* aVidInFormat, TAVCEI...
function OSCL_EXPORT_REF (line 323) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::GetParameterSet(uint8 *param...
function OSCL_EXPORT_REF (line 376) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::Encode(TAVCEIInputData *aVidIn)
function OSCL_EXPORT_REF (line 473) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::GetOutput(TAVCEIOutputData *...
function OSCL_EXPORT_REF (line 640) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::FlushInput()
function TAVCEI_RETVAL (line 647) | TAVCEI_RETVAL PVAVCEncoder::CleanupEncoder()
function OSCL_EXPORT_REF (line 679) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::UpdateBitRate(int32 *aBitRate)
function OSCL_EXPORT_REF (line 688) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::UpdateFrameRate(OsclFloat *a...
function OSCL_EXPORT_REF (line 697) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::UpdateIDRFrameInterval(int32...
function OSCL_EXPORT_REF (line 706) | OSCL_EXPORT_REF TAVCEI_RETVAL PVAVCEncoder::IDRRequest()
function OSCL_EXPORT_REF (line 715) | OSCL_EXPORT_REF int32 PVAVCEncoder::GetEncodeWidth(int32 aLayer)
function OSCL_EXPORT_REF (line 722) | OSCL_EXPORT_REF int32 PVAVCEncoder::GetEncodeHeight(int32 aLayer)
function OSCL_EXPORT_REF (line 729) | OSCL_EXPORT_REF OsclFloat PVAVCEncoder::GetEncodeFrameRate(int32 aLayer)
function AVCProfile (line 776) | AVCProfile PVAVCEncoder::mapProfile(TAVCEIProfile in)
function AVCLevel (line 812) | AVCLevel PVAVCEncoder::mapLevel(TAVCEILevel in)
FILE: RtspCamera/jni/avc_h264/enc/src/pvavcencoder_factory.cpp
function PVAVCEncoderInterface (line 37) | PVAVCEncoderInterface* PVAVCEncoderFactory::CreatePVAVCEncoder()
function OSCL_EXPORT_REF (line 49) | OSCL_EXPORT_REF bool PVAVCEncoderFactory::DeletePVAVCEncoder(PVAVCEncode...
FILE: RtspCamera/jni/avc_h264/enc/src/rate_control.cpp
function GetAvgFrameQP (line 47) | int GetAvgFrameQP(AVCRateControl *rateCtrl)
function AVCEnc_Status (line 52) | AVCEnc_Status RCDetermineFrameNum(AVCEncObject *encvid, AVCRateControl *...
function RCUpdateBuffer (line 139) | void RCUpdateBuffer(AVCCommonObj *video, AVCRateControl *rateCtrl, int f...
function AVCEnc_Status (line 166) | AVCEnc_Status InitRateControlModule(AVCHandle *avcHandle)
function CleanupRateControlModule (line 294) | void CleanupRateControlModule(AVCHandle *avcHandle)
function RCInitGOP (line 324) | void RCInitGOP(AVCEncObject *encvid)
function RCInitFrameQP (line 334) | void RCInitFrameQP(AVCEncObject *encvid)
function calculateQuantizer_Multipass (line 406) | void calculateQuantizer_Multipass(AVCEncObject *encvid, AVCCommonObj *vi...
function targetBitCalculation (line 521) | void targetBitCalculation(AVCEncObject *encvid, AVCCommonObj *video, AVC...
function updateRC_PostProc (line 665) | void updateRC_PostProc(AVCRateControl *rateCtrl, MultiPass *pMP)
function RCInitChromaQP (line 693) | void RCInitChromaQP(AVCEncObject *encvid)
function RCInitMBQP (line 735) | void RCInitMBQP(AVCEncObject *encvid)
function RCPostMB (line 746) | void RCPostMB(AVCCommonObj *video, AVCRateControl *rateCtrl, int num_hea...
function RCRestoreQP (line 755) | void RCRestoreQP(AVCMacroblock *currMB, AVCCommonObj *video, AVCEncObjec...
function RCCalculateMAD (line 764) | void RCCalculateMAD(AVCEncObject *encvid, AVCMacroblock *currMB, uint8 *...
function AVCEnc_Status (line 793) | AVCEnc_Status RCUpdateFrame(AVCEncObject *encvid)
function AVCSaveRDSamples (line 847) | void AVCSaveRDSamples(MultiPass *pMP, int counter_samples)
function updateRateControl (line 858) | void updateRateControl(AVCRateControl *rateCtrl, int nal_type)
function ComputeFrameMAD (line 899) | double ComputeFrameMAD(AVCCommonObj *video, AVCRateControl *rateCtrl)
function QP2Qstep (line 915) | double QP2Qstep(int QP)
function Qstep2QP (line 929) | int Qstep2QP(double Qstep)
function RCUpdateParams (line 980) | void RCUpdateParams(AVCRateControl *rateCtrl, AVCEncObject *encvid)
FILE: RtspCamera/jni/avc_h264/enc/src/residual.cpp
function AVCEnc_Status (line 21) | AVCEnc_Status EncodeIntraPCM(AVCEncObject *encvid)
function AVCEnc_Status (line 146) | AVCEnc_Status enc_residual_block(AVCEncObject *encvid, AVCResidualType t...
FILE: RtspCamera/jni/avc_h264/enc/src/sad.cpp
function AVCSAD_Macroblock_C (line 62) | int AVCSAD_Macroblock_C(uint8 *ref, uint8 *blk, int dmin_lx, void *extra...
function AVCAVCSAD_MB_HTFM_Collect (line 90) | int AVCAVCSAD_MB_HTFM_Collect(uint8 *ref, uint8 *blk, int dmin_lx, void ...
function AVCSAD_MB_HTFM (line 195) | int AVCSAD_MB_HTFM(uint8 *ref, uint8 *blk, int dmin_lx, void *extra_info)
FILE: RtspCamera/jni/avc_h264/enc/src/sad_halfpel.cpp
function AVCSAD_MB_HalfPel_Cxhyh (line 68) | int AVCSAD_MB_HalfPel_Cxhyh(uint8 *ref, uint8 *blk, int dmin_rx, void *e...
function AVCSAD_MB_HalfPel_Cyh (line 109) | int AVCSAD_MB_HalfPel_Cyh(uint8 *ref, uint8 *blk, int dmin_rx, void *ext...
function AVCSAD_MB_HalfPel_Cxh (line 145) | int AVCSAD_MB_HalfPel_Cxh(uint8 *ref, uint8 *blk, int dmin_rx, void *ext...
function AVCAVCSAD_MB_HP_HTFM_Collectxhyh (line 181) | int AVCAVCSAD_MB_HP_HTFM_Collectxhyh(uint8 *ref, uint8 *blk, int dmin_rx...
function AVCAVCSAD_MB_HP_HTFM_Collectyh (line 263) | int AVCAVCSAD_MB_HP_HTFM_Collectyh(uint8 *ref, uint8 *blk, int dmin_rx, ...
function AVCAVCSAD_MB_HP_HTFM_Collectxh (line 344) | int AVCAVCSAD_MB_HP_HTFM_Collectxh(uint8 *ref, uint8 *blk, int dmin_rx, ...
function AVCSAD_MB_HP_HTFMxhyh (line 424) | int AVCSAD_MB_HP_HTFMxhyh(uint8 *ref, uint8 *blk, int dmin_rx, void *ext...
function AVCSAD_MB_HP_HTFMyh (line 492) | int AVCSAD_MB_HP_HTFMyh(uint8 *ref, uint8 *blk, int dmin_rx, void *extra...
function AVCSAD_MB_HP_HTFMxh (line 558) | int AVCSAD_MB_HP_HTFMxh(uint8 *ref, uint8 *blk, int dmin_rx, void *extra...
FILE: RtspCamera/jni/avc_h264/enc/src/sad_halfpel_inline.h
function int32 (line 29) | __inline int32 INTERP1_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 38) | __inline int32 INTERP2_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 49) | __inline int32 INTERP1_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 61) | __inline int32 INTERP2_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 75) | __inline int32 INTERP1_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 82) | __inline int32 INTERP2_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
FILE: RtspCamera/jni/avc_h264/enc/src/sad_inline.h
function int32 (line 28) | __inline int32 SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 37) | __inline int32 sad_4pixel(int32 src1, int32 src2, int32 mask)
function int32 (line 77) | __inline int32 simd_sad_mb(uint8 *ref, uint8 *blk, int dmin, int lx)
function int32 (line 172) | __inline int32 SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 184) | __inline int32 sad_4pixel(int32 src1, int32 src2, int32 mask)
function int32 (line 203) | __inline int32 sad_4pixelN(int32 src1, int32 src2, int32 mask)
function int32 (line 252) | __inline int32 simd_sad_mb(uint8 *ref, uint8 *blk, int dmin, int lx)
function int32 (line 341) | __inline int32 SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 347) | __inline int32 sad_4pixel(int32 src1, int32 src2, int32 mask)
function int32 (line 356) | __inline int32 sad_4pixelN(int32 src1, int32 src2, int32 mask)
function int32 (line 390) | __inline int32 simd_sad_mb(uint8 *ref, uint8 *blk, int dmin, int lx)
FILE: RtspCamera/jni/avc_h264/enc/src/sad_mb_offset.h
function int32 (line 26) | __inline int32 sad_mb_offset1(uint8 *ref, uint8 *blk, int lx, int dmin)
FILE: RtspCamera/jni/avc_h264/enc/src/slice.cpp
function AVCEnc_Status (line 22) | AVCEnc_Status AVCEncodeSlice(AVCEncObject *encvid)
function AVCEnc_Status (line 154) | AVCEnc_Status EncodeMB(AVCEncObject *encvid)
function Copy_MB (line 512) | void Copy_MB(uint8 *curL, uint8 *curCb, uint8 *curCr, uint8 *predBlock, ...
function AVCEnc_Status (line 551) | AVCEnc_Status EncodeMBHeader(AVCMacroblock *currMB, AVCEncObject *encvid)
function uint (line 603) | uint InterpretMBType(AVCMacroblock *currMB, int slice_type)
function AVCEnc_Status (line 687) | AVCEnc_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCEnc...
function AVCEnc_Status (line 773) | AVCEnc_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AV...
function InterpretSubMBTypeP (line 869) | void InterpretSubMBTypeP(AVCMacroblock *mblock, uint *sub_mb_type)
function InterpretSubMBTypeB (line 886) | void InterpretSubMBTypeB(AVCMacroblock *mblock, uint *sub_mb_type)
function AVCEnc_Status (line 921) | AVCEnc_Status EncodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *cur...
FILE: RtspCamera/jni/avc_h264/enc/src/vlc_encode.cpp
function AVCEnc_Status (line 22) | AVCEnc_Status ue_v(AVCEncBitstream *bitstream, uint codeNum)
function AVCEnc_Status (line 32) | AVCEnc_Status se_v(AVCEncBitstream *bitstream, int value)
function AVCEnc_Status (line 51) | AVCEnc_Status te_v(AVCEncBitstream *bitstream, uint value, uint range)
function AVCEnc_Status (line 70) | AVCEnc_Status SetEGBitstring(AVCEncBitstream *bitstream, uint codeNum)
function AVCEnc_Status (line 106) | AVCEnc_Status EncodeCBP(AVCMacroblock *currMB, AVCEncBitstream *stream)
function AVCEnc_Status (line 125) | AVCEnc_Status ce_TotalCoeffTrailingOnes(AVCEncBitstream *stream, int Tra...
function AVCEnc_Status (line 194) | AVCEnc_Status ce_TotalCoeffTrailingOnesChromaDC(AVCEncBitstream *stream,...
function AVCEnc_Status (line 215) | AVCEnc_Status ce_TotalZeros(AVCEncBitstream *stream, int total_zeros, in...
function AVCEnc_Status (line 266) | AVCEnc_Status ce_TotalZerosChromaDC(AVCEncBitstream *stream, int total_z...
function AVCEnc_Status (line 294) | AVCEnc_Status ce_RunBefore(AVCEncBitstream *stream, int run_before, int ...
FILE: RtspCamera/jni/avc_h264/oscl/oscl_base.h
function class (line 25) | class OsclBase
function class (line 32) | class OsclErrorTrap
function class (line 41) | class OsclMem
function class (line 48) | class OsclRequestStatus
function class (line 68) | class OsclActiveObject
function class (line 226) | class OsclTimerObject
FILE: RtspCamera/jni/avc_h264/oscl/oscl_error_codes.h
type int32 (line 25) | typedef int32 OsclLeaveCode;
type int32 (line 51) | typedef int32 OsclReturnCode;
FILE: RtspCamera/jni/avc_h264/oscl/oscl_types.h
type int8 (line 40) | typedef signed char int8;
type uint8 (line 45) | typedef unsigned char uint8;
type int16 (line 50) | typedef short int16;
type uint16 (line 55) | typedef unsigned short uint16;
type int32 (line 60) | typedef long int32;
type uint32 (line 65) | typedef unsigned long uint32;
type sint8 (line 69) | typedef signed char sint8;
type OsclFloat (line 73) | typedef float OsclFloat;
type uint (line 77) | typedef unsigned int uint;
type OSCL_NATIVE_INT64_TYPE (line 84) | typedef OSCL_NATIVE_INT64_TYPE int64;
type OSCL_NATIVE_UINT64_TYPE (line 90) | typedef OSCL_NATIVE_UINT64_TYPE uint64;
FILE: RtspCamera/jni/m4v_h263/dec/include/mp4dec_api.h
type uint (line 35) | typedef uint Bool;
type OutputFrame (line 68) | typedef struct tagOutputFrame
type applicationData (line 74) | typedef struct tagApplicationData
type VideoDecControls (line 82) | typedef struct tagvideoDecControls
type MP4DecodingMode (line 102) | typedef enum
type MP4FrameType (line 109) | typedef enum
type VopHeaderInfo (line 114) | typedef struct tagVopHeaderInfo
type VideoRefCopyInfoPtr (line 128) | typedef struct tagVideoRefCopyInfoPtr
type VideoRefCopyInfoData (line 136) | typedef struct tagVideoRefCopyInfoData
type VideoRefCopyInfo (line 144) | typedef struct tagVideoRefCopyInfo
FILE: RtspCamera/jni/m4v_h263/dec/include/pvm4vdecoder.h
function class (line 34) | class PVM4VDecoder : public PVVideoDecoderInterface
FILE: RtspCamera/jni/m4v_h263/dec/include/pvm4vdecoder_dpi.h
function class (line 61) | class PVM4VDecoder_DPI : public PVVideoDecoderInterface
FILE: RtspCamera/jni/m4v_h263/dec/include/pvm4vdecoder_factory.h
function class (line 31) | class PVM4VDecoderFactory
FILE: RtspCamera/jni/m4v_h263/dec/include/pvvideodecoderinterface.h
function class (line 33) | class PVVideoDecoderInterface
FILE: RtspCamera/jni/m4v_h263/dec/include/visual_header.h
type uint (line 24) | typedef uint Bool;
type VolInfo (line 29) | typedef struct tagVolInfo
FILE: RtspCamera/jni/m4v_h263/dec/oscl/oscl_base.h
function class (line 25) | class OsclBase
function class (line 32) | class OsclErrorTrap
function class (line 41) | class OsclMem
function class (line 48) | class OsclRequestStatus
function class (line 68) | class OsclActiveObject
function class (line 226) | class OsclTimerObject
FILE: RtspCamera/jni/m4v_h263/dec/oscl/oscl_error_codes.h
type int32 (line 25) | typedef int32 OsclLeaveCode;
type int32 (line 51) | typedef int32 OsclReturnCode;
FILE: RtspCamera/jni/m4v_h263/dec/oscl/oscl_types.h
type int8 (line 37) | typedef signed char int8;
type uint8 (line 42) | typedef unsigned char uint8;
type int16 (line 47) | typedef short int16;
type uint16 (line 52) | typedef unsigned short uint16;
type int32 (line 57) | typedef long int32;
type uint32 (line 62) | typedef unsigned long uint32;
type sint8 (line 66) | typedef signed char sint8;
type OsclFloat (line 70) | typedef float OsclFloat;
type uint (line 74) | typedef unsigned int uint;
type OSCL_NATIVE_INT64_TYPE (line 81) | typedef OSCL_NATIVE_INT64_TYPE int64;
type OSCL_NATIVE_UINT64_TYPE (line 87) | typedef OSCL_NATIVE_UINT64_TYPE uint64;
FILE: RtspCamera/jni/m4v_h263/dec/src/3GPVideoParser.cpp
function uint32 (line 42) | uint32 EndienConvert (uint32 input){
function uint32 (line 49) | uint32 getUint32FromUint8Buffer (uint8* buffer,uint32 offset){
function int32 (line 56) | int32 findAtom (uint8* buffer,uint32 bufferSize, uint32 valueToFind){
function int32 (line 71) | int32 findAtom (uint32* buffer,uint32 bufferSize, uint32 valueToFind){
function cleanupParser (line 86) | int cleanupParser(void){
function Init3GPVideoParser (line 104) | int Init3GPVideoParser (char *filePath){
function getFrame (line 304) | int getFrame (uint8* aOutBuffer,uint32* aBufferSize, uint32* aTimestamp){
function release (line 335) | int release(){
function uint32 (line 347) | uint32 getVideoDuration (){
function uint32 (line 367) | uint32 getVideoWidth (){
function uint32 (line 376) | uint32 getVideoHeight(){
FILE: RtspCamera/jni/m4v_h263/dec/src/3GPVideoParser.h
type uint8 (line 13) | typedef unsigned char uint8;
type uint16 (line 14) | typedef unsigned short uint16;
type int16 (line 15) | typedef short int16;
type uint32 (line 16) | typedef unsigned long uint32;
type int32 (line 17) | typedef long int32;
type Atom (line 39) | typedef struct {
type sample (line 44) | struct sample {
type Sample (line 50) | typedef struct sample Sample;
FILE: RtspCamera/jni/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp
function AdaptiveSmooth_NoMMX (line 212) | void AdaptiveSmooth_NoMMX(
FILE: RtspCamera/jni/m4v_h263/dec/src/bitstream.cpp
function PV_STATUS (line 50) | PV_STATUS BitstreamFillCache(BitstreamDecVideo *stream)
function BitstreamReset (line 125) | void BitstreamReset(BitstreamDecVideo *stream, uint8 *buffer, int32 buff...
function BitstreamOpen (line 141) | int BitstreamOpen(BitstreamDecVideo *stream, int layer)
function BitstreamClose (line 165) | void BitstreamClose(BitstreamDecVideo * stream)
function PV_STATUS (line 178) | PV_STATUS BitstreamShowBits32HC(BitstreamDecVideo *stream, uint32 *code)
function PV_STATUS (line 196) | PV_STATUS BitstreamShowBits32(BitstreamDecVideo *stream, int nbits, uint...
function PV_STATUS (line 218) | PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nbits, uint...
function PV_STATUS (line 241) | PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *code)
function PV_STATUS (line 260) | PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *code)
function uint (line 273) | uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int nbits)
function uint (line 289) | uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream)
function uint (line 314) | uint BitstreamReadBits16(BitstreamDecVideo *stream, int nbits)
function uint (line 336) | uint BitstreamRead1Bits(BitstreamDecVideo *stream)
function PV_STATUS (line 358) | PV_STATUS PV_BitstreamFlushBitsCheck(BitstreamDecVideo *stream, int nbits)
function uint32 (line 385) | uint32 BitstreamReadBits32(BitstreamDecVideo *stream, int nbits)
function uint32 (line 399) | uint32 BitstreamReadBits32HC(BitstreamDecVideo *stream)
function PV_STATUS (line 418) | PV_STATUS BitstreamCheckEndBuffer(BitstreamDecVideo *stream)
function PV_STATUS (line 425) | PV_STATUS PV_BitstreamShowBitsByteAlign(BitstreamDecVideo *stream, int n...
function PV_STATUS (line 444) | PV_STATUS PV_BitstreamShowBitsByteAlignNoForceStuffing(BitstreamDecVideo...
function PV_STATUS (line 463) | PV_STATUS PV_BitstreamByteAlign(BitstreamDecVideo *stream)
function PV_STATUS (line 490) | PV_STATUS BitstreamByteAlignNoForceStuffing(BitstreamDecVideo *stream)
function int32 (line 516) | int32 getPointer(BitstreamDecVideo *stream)
function PV_STATUS (line 532) | PV_STATUS movePointerTo(BitstreamDecVideo *stream, int32 pos)
function Bool (line 568) | Bool validStuffing(BitstreamDecVideo *stream)
function Bool (line 580) | Bool validStuffing_h263(BitstreamDecVideo *stream)
function PV_STATUS (line 606) | PV_STATUS PVSearchNextH263Frame(BitstreamDecVideo *stream)
function PV_STATUS (line 635) | PV_STATUS PVSearchNextM4VFrame(BitstreamDecVideo *stream)
function PVLocateM4VFrameBoundary (line 655) | void PVLocateM4VFrameBoundary(BitstreamDecVideo *stream)
function PVLocateH263FrameBoundary (line 666) | void PVLocateH263FrameBoundary(BitstreamDecVideo *stream)
function PV_STATUS (line 685) | PV_STATUS quickSearchVideoPacketHeader(BitstreamDecVideo *stream, int ma...
function PV_STATUS (line 709) | PV_STATUS quickSearchH263SliceHeader(BitstreamDecVideo *stream)
function PV_STATUS (line 787) | PV_STATUS quickSearchMotionMarker(BitstreamDecVideo *stream)
function PV_STATUS (line 878) | PV_STATUS quickSearchDCM(BitstreamDecVideo *stream)
function PV_STATUS (line 953) | PV_STATUS quickSearchGOBHeader(BitstreamDecVideo *stream)
FILE: RtspCamera/jni/m4v_h263/dec/src/bitstream.h
function PV_STATUS (line 51) | __inline PV_STATUS BitstreamShowBits16(BitstreamDecVideo *stream, int nb...
function PV_STATUS (line 69) | __inline PV_STATUS BitstreamShow15Bits(BitstreamDecVideo *stream, uint *...
function PV_STATUS (line 83) | __inline PV_STATUS BitstreamShow13Bits(BitstreamDecVideo *stream, uint *...
function uint (line 95) | __inline uint BitstreamReadBits16_INLINE(BitstreamDecVideo *stream, int ...
function uint (line 110) | __inline uint BitstreamRead1Bits_INLINE(BitstreamDecVideo *stream)
FILE: RtspCamera/jni/m4v_h263/dec/src/block_idct.cpp
function MBlockIDCT (line 223) | void MBlockIDCT(VideoDecData *video)
function BlockIDCT_intra (line 251) | void BlockIDCT_intra(
function Copy_Blk_to_Vop (line 324) | void Copy_Blk_to_Vop(uint8 *dst, uint8 *pred, int width)
function BlockIDCT (line 349) | void BlockIDCT(
function idctrow (line 510) | void idctrow(
function idctrow_intra (line 621) | void idctrow_intra(
function idctcol (line 831) | void idctcol(
FILE: RtspCamera/jni/m4v_h263/dec/src/cal_dc_scaler.cpp
function cal_dc_scaler (line 150) | int cal_dc_scaler(
FILE: RtspCamera/jni/m4v_h263/dec/src/chv_filter.cpp
function CombinedHorzVertFilter (line 177) | void CombinedHorzVertFilter(
function CombinedHorzVertFilter_NoSoftDeblocking (line 454) | void CombinedHorzVertFilter_NoSoftDeblocking(
FILE: RtspCamera/jni/m4v_h263/dec/src/chvr_filter.cpp
function CombinedHorzVertRingFilter (line 23) | void CombinedHorzVertRingFilter(
FILE: RtspCamera/jni/m4v_h263/dec/src/com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_decoder_NativeH263Decoder.cpp
function deinitDecoder (line 41) | int deinitDecoder(){
function initDecoder (line 60) | int initDecoder(int srcWidth, int srcHeight){
function JNIEXPORT (line 89) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 99) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 109) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 160) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 200) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 212) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 223) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 234) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 245) | JNIEXPORT jstring JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_...
function JNIEXPORT (line 259) | JNIEXPORT jobject JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_...
function jint (line 322) | jint JNI_OnLoad(JavaVM* vm, void* reserved) {
FILE: RtspCamera/jni/m4v_h263/dec/src/combined_decode.cpp
function PV_STATUS (line 43) | PV_STATUS DecodeFrameCombinedMode(VideoDecData *video)
function PV_STATUS (line 346) | PV_STATUS GetMBheader(VideoDecData *video, int16 *QP)
function PV_STATUS (line 527) | PV_STATUS GetMBData(VideoDecData *video)
FILE: RtspCamera/jni/m4v_h263/dec/src/conceal.cpp
function ConcealTexture_I (line 31) | void ConcealTexture_I(VideoDecData *video, int32 startFirstPartition, in...
function ConcealTexture_P (line 81) | void ConcealTexture_P(VideoDecData *video, int mb_start, int mb_stop, in...
function ConcealPacket (line 115) | void ConcealPacket(VideoDecData *video,
function CopyVopMB (line 138) | void CopyVopMB(Vop *curr, uint8 *prevFrame, int mbnum, int width_Y, int ...
FILE: RtspCamera/jni/m4v_h263/dec/src/datapart_decode.cpp
function PV_STATUS (line 40) | PV_STATUS DecodeFrameDataPartMode(VideoDecData *video)
function PV_STATUS (line 139) | PV_STATUS DecodeDataPart_I_VideoPacket(VideoDecData *video, int slice_co...
function PV_STATUS (line 304) | PV_STATUS DecodeDataPart_P_VideoPacket(VideoDecData *video, int slice_co...
function PV_STATUS (line 517) | PV_STATUS GetMBheaderDataPart_DQUANT_DC(VideoDecData *video, int16 *QP)
function PV_STATUS (line 575) | PV_STATUS GetMBheaderDataPart_P(VideoDecData *video)
function PV_STATUS (line 619) | PV_STATUS GetMBData_DataPart(VideoDecData *video)
FILE: RtspCamera/jni/m4v_h263/dec/src/dcac_prediction.cpp
function doDCACPrediction (line 28) | void doDCACPrediction(
function doDCACPrediction_I (line 206) | void doDCACPrediction_I(
FILE: RtspCamera/jni/m4v_h263/dec/src/dec_pred_intra_dc.cpp
function PV_STATUS (line 23) | PV_STATUS PV_DecodePredictedIntraDC(
FILE: RtspCamera/jni/m4v_h263/dec/src/deringing_chroma.cpp
function Deringing_Chroma (line 23) | void Deringing_Chroma(
FILE: RtspCamera/jni/m4v_h263/dec/src/deringing_luma.cpp
function Deringing_Luma (line 23) | void Deringing_Luma(
FILE: RtspCamera/jni/m4v_h263/dec/src/find_min_max.cpp
function FindMaxMin (line 132) | void FindMaxMin(
FILE: RtspCamera/jni/m4v_h263/dec/src/get_pred_adv_b_add.cpp
function GetPredAdvancedBy0x0 (line 83) | int GetPredAdvancedBy0x0(
function GetPredAdvancedBy0x1 (line 183) | int GetPredAdvancedBy0x1(
function GetPredAdvancedBy1x0 (line 512) | int GetPredAdvancedBy1x0(
function GetPredAdvancedBy1x1 (line 859) | int GetPredAdvancedBy1x1(
FILE: RtspCamera/jni/m4v_h263/dec/src/get_pred_outside.cpp
function GetPredOutside (line 241) | int GetPredOutside(
FILE: RtspCamera/jni/m4v_h263/dec/src/idct.cpp
function idct_intra (line 121) | void idct_intra(
function idct (line 344) | void idct(
FILE: RtspCamera/jni/m4v_h263/dec/src/idct_vca.cpp
function idctrow0 (line 31) | void idctrow0(int16 *blk, uint8 *pred, uint8 *dst, int width)
function idctcol0 (line 39) | void idctcol0(int16 *blk)
function idctrow1 (line 45) | void idctrow1(int16 *blk, uint8 *pred, uint8 *dst, int width)
function idctcol1 (line 95) | void idctcol1(int16 *blk)
function idctrow2 (line 102) | void idctrow2(int16 *blk, uint8 *pred, uint8 *dst, int width)
function idctcol2 (line 163) | void idctcol2(int16 *blk)
function idctrow3 (line 190) | void idctrow3(int16 *blk, uint8 *pred, uint8 *dst, int width)
function idctcol3 (line 262) | void idctcol3(int16 *blk)
function idctrow4 (line 299) | void idctrow4(int16 *blk, uint8 *pred, uint8 *dst, int width)
function idctcol4 (line 376) | void idctcol4(int16 *blk)
function idctrow0_intra (line 419) | void idctrow0_intra(int16 *blk, PIXEL * comp, int width)
function idctrow1_intra (line 427) | void idctrow1_intra(int16 *blk, PIXEL *comp, int width)
function idctrow2_intra (line 456) | void idctrow2_intra(int16 *blk, PIXEL *comp, int width)
function idctrow3_intra (line 513) | void idctrow3_intra(int16 *blk, PIXEL *comp, int width)
function idctrow4_intra (line 586) | void idctrow4_intra(int16 *blk, PIXEL *comp, int width)
FILE: RtspCamera/jni/m4v_h263/dec/src/mb_motion_comp.cpp
function MBMotionComp (line 119) | void MBMotionComp(
function SkippedMBMotionComp (line 548) | void SkippedMBMotionComp(
FILE: RtspCamera/jni/m4v_h263/dec/src/mb_utils.cpp
function PutSKIPPED_MB (line 25) | void PutSKIPPED_MB(uint8 *comp, uint8 *prev, int width)
function PutSKIPPED_B (line 86) | void PutSKIPPED_B(uint8 *comp, uint8 *prev, int width)
FILE: RtspCamera/jni/m4v_h263/dec/src/mp4def.h
type PV_STATUS (line 23) | typedef enum
type uint8 (line 35) | typedef uint8 PIXEL;
type int16 (line 36) | typedef int16 MOT;
FILE: RtspCamera/jni/m4v_h263/dec/src/mp4lib_int.h
type BitstreamDecVideo (line 32) | typedef struct tagBitstream
type Complexity_Est (line 48) | typedef struct tagComplexity_Est
type Vop (line 56) | typedef struct tagVop
type Vol (line 82) | typedef struct tagVol
type int16 (line 132) | typedef int16 typeMBStore[6][NCOEFF_BLOCK];
type MacroBlock (line 134) | typedef struct tagMacroBlock
type HeaderInfoDecVideo (line 148) | typedef struct tagHeaderInfoDecVideo
type Tcoef (line 158) | typedef struct tagTcoef
type VLCtab (line 168) | typedef struct tagVLCtab
type VLCshorttab (line 174) | typedef struct tagVLCshorttab
type VLCtab2 (line 180) | typedef struct tagVLCtab2
type int16 (line 193) | typedef int16 typeDCStore[6];
type int16 (line 194) | typedef int16 typeDCACStore[4][8];
type VideoDecData (line 199) | typedef struct tagVideoDecData
FILE: RtspCamera/jni/m4v_h263/dec/src/packet_util.cpp
function PV_STATUS (line 28) | PV_STATUS PV_ReadVideoPacketHeader(VideoDecData *video, int *next_MB)
function PV_STATUS (line 144) | PV_STATUS PV_GobHeader(VideoDecData *video)
function PV_STATUS (line 194) | PV_STATUS PV_H263SliceHeader(VideoDecData *video, int *next_MB)
FILE: RtspCamera/jni/m4v_h263/dec/src/post_filter.cpp
function PostFilter (line 31) | void PostFilter(
function H263_Deblock (line 176) | void H263_Deblock(uint8 *rec,
FILE: RtspCamera/jni/m4v_h263/dec/src/post_proc_semaphore.cpp
function PostProcSemaphore (line 128) | int PostProcSemaphore(
FILE: RtspCamera/jni/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp
function pp_semaphore_chroma_inter (line 111) | void pp_semaphore_chroma_inter(
FILE: RtspCamera/jni/m4v_h263/dec/src/pp_semaphore_luma.cpp
function uint8 (line 112) | uint8 pp_semaphore_luma(
FILE: RtspCamera/jni/m4v_h263/dec/src/pvdec_api.cpp
function OSCL_EXPORT_REF (line 68) | OSCL_EXPORT_REF Bool PVInitVideoDecoder(VideoDecControls *decCtrl, uint8...
function Bool (line 294) | Bool PVAllocVideoData(VideoDecControls *decCtrl, int width, int height, ...
function Bool (line 538) | Bool PVResetVideoDecoder(VideoDecControls *decCtrl)
function OSCL_EXPORT_REF (line 565) | OSCL_EXPORT_REF Bool PVCleanUpVideoDecoder(VideoDecControls *decCtrl)
function OSCL_EXPORT_REF (line 730) | OSCL_EXPORT_REF void PVGetVideoDimensions(VideoDecControls *decCtrl, int...
function uint32 (line 746) | uint32 PVGetVideoTimeStamp(VideoDecControls *decCtrl)
function OSCL_EXPORT_REF (line 762) | OSCL_EXPORT_REF void PVSetPostProcType(VideoDecControls *decCtrl, int mode)
function PVGetDecBitrate (line 778) | int PVGetDecBitrate(VideoDecControls *decCtrl)
function PVGetDecFramerate (line 802) | int PVGetDecFramerate(VideoDecControls *decCtrl)
function uint8 (line 818) | uint8 *PVGetDecOutputFrame(VideoDecControls *decCtrl)
function PVGetLayerID (line 832) | int PVGetLayerID(VideoDecControls *decCtrl)
function int32 (line 846) | int32 PVGetDecMemoryUsage(VideoDecControls *decCtrl)
function OSCL_EXPORT_REF (line 863) | OSCL_EXPORT_REF MP4DecodingMode PVGetDecBitstreamMode(VideoDecControls *...
function Bool (line 893) | Bool PVExtractVolHeader(uint8 *video_buffer, uint8 *vol_header, int32 *v...
function int32 (line 941) | int32 PVLocateFrameHeader(uint8 *ptr, int32 size)
function int32 (line 976) | int32 PVLocateH263FrameHeader(uint8 *ptr, int32 size)
function OSCL_EXPORT_REF (line 1016) | OSCL_EXPORT_REF Bool PVDecodeVideoFrame(VideoDecControls *decCtrl, uint8...
function Bool (line 1265) | Bool PVDecodeVopBody(VideoDecControls *decCtrl, int32 buffer_size[])
function OSCL_EXPORT_REF (line 1480) | OSCL_EXPORT_REF void PVSetReferenceYUV(VideoDecControls *decCtrl, uint8 ...
function uint (line 1504) | uint VideoDecoderErrorDetected(VideoDecData * video)
function m4vdec_dprintf (line 1526) | void m4vdec_dprintf(char *format, ...)
function Bool (line 1553) | Bool IsIntraFrame(VideoDecControls *decCtrl)
function PVDecPostProcess (line 1568) | void PVDecPostProcess(VideoDecControls *decCtrl, uint8 *outputYUV)
function Bool (line 1624) | Bool PVDecSetReference(VideoDecControls *decCtrl, uint8 *refYUV, uint32 ...
function Bool (line 1665) | Bool PVDecSetEnhReference(VideoDecControls *decCtrl, uint8 *refYUV, uint...
function Bool (line 1706) | Bool PVGetVolInfo(VideoDecControls *decCtrl, VolInfo *pVolInfo)
FILE: RtspCamera/jni/m4v_h263/dec/src/pvm4vdecoder.cpp
function PVM4VDecoder (line 39) | PVM4VDecoder* PVM4VDecoder::New(void)
function uint32 (line 145) | uint32 PVM4VDecoder::GetVideoTimestamp(void)
function uint8 (line 163) | uint8* PVM4VDecoder::GetDecOutputFrame(void)
function uint32 (line 189) | uint32 PVM4VDecoder::GetDecBitrate(void)
function uint32 (line 195) | uint32 PVM4VDecoder::GetProfileAndLevel(void)
FILE: RtspCamera/jni/m4v_h263/dec/src/pvm4vdecoder_factory.cpp
function PVVideoDecoderInterface (line 38) | PVVideoDecoderInterface* PVM4VDecoderFactory::CreatePVM4VDecoder()
function OSCL_EXPORT_REF (line 50) | OSCL_EXPORT_REF bool PVM4VDecoderFactory::DeletePVM4VDecoder(PVVideoDeco...
FILE: RtspCamera/jni/m4v_h263/dec/src/vlc_decode.cpp
function PV_STATUS (line 103) | PV_STATUS DecodeUserData(BitstreamDecVideo *stream)
function PV_STATUS (line 267) | PV_STATUS PV_DecodeMBVec(BitstreamDecVideo *stream, MOT *mv_x, MOT *mv_y...
function PV_STATUS (line 319) | PV_STATUS PV_DeScaleMVD(
function mv_prediction (line 353) | void mv_prediction(
function PV_STATUS (line 452) | PV_STATUS PV_VlcDecMV(BitstreamDecVideo *stream, int *mv)
function PV_VlcDecMCBPC_com_intra (line 503) | int PV_VlcDecMCBPC_com_intra(BitstreamDecVideo *stream)
function PV_VlcDecMCBPC_com_inter (line 536) | int PV_VlcDecMCBPC_com_inter(BitstreamDecVideo *stream)
function PV_VlcDecMCBPC_com_inter_H263 (line 557) | int PV_VlcDecMCBPC_com_inter_H263(BitstreamDecVideo *stream)
function PV_VlcDecCBPY (line 590) | int PV_VlcDecCBPY(BitstreamDecVideo *stream, int intra)
function PV_STATUS (line 628) | PV_STATUS PV_VlcDecIntraDCPredSize(BitstreamDecVideo *stream, int compnu...
function PV_STATUS (line 839) | PV_STATUS VlcDecTCOEFIntra(BitstreamDecVideo *stream, Tcoef *pTcoef)
function PV_STATUS (line 1022) | PV_STATUS VlcDecTCOEFInter(BitstreamDecVideo *stream, Tcoef *pTcoef)
function PV_STATUS (line 1201) | PV_STATUS VlcDecTCOEFShortHeader(BitstreamDecVideo *stream, Tcoef *pTcoe...
function PV_STATUS (line 1262) | PV_STATUS VlcDecTCOEFShortHeader_AnnexI(BitstreamDecVideo *stream, Tcoef...
function PV_STATUS (line 1317) | PV_STATUS VlcDecTCOEFShortHeader_AnnexT(BitstreamDecVideo *stream, Tcoef...
function PV_STATUS (line 1390) | PV_STATUS VlcDecTCOEFShortHeader_AnnexIT(BitstreamDecVideo *stream, Tcoe...
function PV_STATUS (line 1472) | PV_STATUS RvlcDecTCOEFInter(BitstreamDecVideo *stream, Tcoef *pTcoef)
function PV_STATUS (line 1555) | PV_STATUS RvlcDecTCOEFIntra(BitstreamDecVideo *stream, Tcoef *pTcoef)
FILE: RtspCamera/jni/m4v_h263/dec/src/vlc_dequant.cpp
function VlcDequantMpegIntraBlock (line 50) | int VlcDequantMpegIntraBlock(void *vid, int comp, int switched,
function VlcDequantMpegInterBlock (line 383) | int VlcDequantMpegInterBlock(void *vid, int comp,
function VlcDequantH263IntraBlock (line 502) | int VlcDequantH263IntraBlock(VideoDecData *video, int comp, int switched,
function VlcDequantH263IntraBlock_SH (line 805) | int VlcDequantH263IntraBlock_SH(VideoDecData *video, int comp, uint8 *bi...
function VlcDequantH263InterBlock (line 1066) | int VlcDequantH263InterBlock(VideoDecData *video, int comp,
FILE: RtspCamera/jni/m4v_h263/dec/src/vop.cpp
function CalcNumBits (line 64) | int CalcNumBits(uint x)
function PV_STATUS (line 81) | PV_STATUS DecodeVOLHeader(VideoDecData *video, int layer)
function PV_STATUS (line 741) | PV_STATUS DecodeGOVHeader(BitstreamDecVideo *stream, uint32 *time_base)
function PV_STATUS (line 805) | PV_STATUS DecodeVOPHeader(VideoDecData *video, Vop *currVop, Bool use_ex...
function PV_STATUS (line 1008) | PV_STATUS DecodeShortHeader(VideoDecData *video, Vop *currVop)
function PV_STATUS (line 1018) | PV_STATUS DecodeH263Header(VideoDecData *video, Vop *currVop)
function PV_STATUS (line 1483) | PV_STATUS PV_DecodeVop(VideoDecData *video)
function uint32 (line 1579) | uint32 CalcVopDisplayTime(Vol *currVol, Vop *currVop, int shortVideoHeader)
FILE: RtspCamera/jni/m4v_h263/dec/src/yuv2rgb.cpp
function convert (line 10) | int convert (int width,int height, uint8 *in,uint32 *out){
FILE: RtspCamera/jni/m4v_h263/enc/include/cvei.h
type TCVEI_RETVAL (line 54) | enum TCVEI_RETVAL
type TCVEI_EVENT (line 63) | enum TCVEI_EVENT
type TPVVideoFormat (line 79) | enum TPVVideoFormat
type TPVContentType (line 89) | enum TPVContentType
type TMP4RateControlType (line 102) | enum TMP4RateControlType
type TPVM4VProfileLevel (line 115) | enum TPVM4VProfileLevel
type TPVVideoEncodeParam (line 137) | struct TPVVideoEncodeParam
type TPVVideoInputFormat (line 263) | struct TPVVideoInputFormat
type TPVVideoInputData (line 283) | struct TPVVideoInputData
type TPVVideoOutputData (line 293) | struct TPVVideoOutputData
function class (line 318) | class MPVCVEIObserver
function class (line 331) | class CommonVideoEncoder : public OsclTimerObject
FILE: RtspCamera/jni/m4v_h263/enc/include/mp4enc_api.h
type UChar (line 27) | typedef unsigned char UChar;
type Char (line 28) | typedef char Char;
type UInt (line 29) | typedef unsigned int UInt;
type Int (line 30) | typedef int Int;
type UShort (line 31) | typedef unsigned short UShort;
type Short (line 32) | typedef short Short;
type Bool (line 33) | typedef unsigned int Bool;
type ULong (line 34) | typedef unsigned long ULong;
type MP4EncodingMode (line 43) | typedef enum
type MP4RateControlType (line 55) | typedef enum
type PassNum (line 65) | typedef enum
type ParamEncMode (line 71) | typedef enum
type ProfileLevelType (line 81) | typedef enum
type MP4HintTrack (line 106) | typedef struct tagMP4HintTrack
type VideoEncControls (line 114) | typedef struct tagvideoEncControls
type VideoEncFrameIO (line 121) | typedef struct tagvideoEncFrameIO
type VideoEncOptions (line 134) | typedef struct tagvideoEncOptions
FILE: RtspCamera/jni/m4v_h263/enc/include/pvm4vencoder.h
type TMP4EncodingMode (line 53) | enum TMP4EncodingMode
type TParamEncMode (line 69) | enum TParamEncMode
type mp4StreamType (line 75) | typedef struct
function class (line 100) | class CPVM4VEncoder : public CommonVideoEncoder
FILE: RtspCamera/jni/m4v_h263/enc/oscl/oscl_base.h
function class (line 25) | class OsclBase
function class (line 32) | class OsclErrorTrap
function class (line 41) | class OsclMem
function class (line 48) | class OsclRequestStatus
function class (line 68) | class OsclActiveObject
function class (line 226) | class OsclTimerObject
FILE: RtspCamera/jni/m4v_h263/enc/oscl/oscl_error_codes.h
type int32 (line 25) | typedef int32 OsclLeaveCode;
type int32 (line 51) | typedef int32 OsclReturnCode;
FILE: RtspCamera/jni/m4v_h263/enc/oscl/oscl_types.h
type int8 (line 37) | typedef signed char int8;
type uint8 (line 42) | typedef unsigned char uint8;
type int16 (line 47) | typedef short int16;
type uint16 (line 52) | typedef unsigned short uint16;
type int32 (line 57) | typedef long int32;
type uint32 (line 62) | typedef unsigned long uint32;
type sint8 (line 66) | typedef signed char sint8;
type OsclFloat (line 70) | typedef float OsclFloat;
type uint (line 74) | typedef unsigned int uint;
type OSCL_NATIVE_INT64_TYPE (line 81) | typedef OSCL_NATIVE_INT64_TYPE int64;
type OSCL_NATIVE_UINT64_TYPE (line 87) | typedef OSCL_NATIVE_UINT64_TYPE uint64;
FILE: RtspCamera/jni/m4v_h263/enc/src/bitstream_io.cpp
function BitstreamEncVideo (line 51) | BitstreamEncVideo *BitStreamCreateEnc(Int bufferSize)
function Void (line 92) | Void BitstreamCloseEnc(BitstreamEncVideo *stream)
function PV_STATUS (line 119) | PV_STATUS BitstreamPutBits(BitstreamEncVideo *stream, Int Length, UInt V...
function PV_STATUS (line 167) | PV_STATUS BitstreamPutGT16Bits(BitstreamEncVideo *stream, Int Length, UL...
function PV_STATUS (line 206) | PV_STATUS BitstreamSaveWord(BitstreamEncVideo *stream)
function PV_STATUS (line 256) | PV_STATUS BitstreamSavePartial(BitstreamEncVideo *stream, Int *fraction)
function Int (line 325) | Int BitstreamShortHeaderByteAlignStuffing(BitstreamEncVideo *stream)
function Int (line 354) | Int BitstreamMpeg4ByteAlignStuffing(BitstreamEncVideo *stream)
function PV_STATUS (line 415) | PV_STATUS BitstreamAppendEnc(BitstreamEncVideo *bitstream1, BitstreamEnc...
function PV_STATUS (line 492) | PV_STATUS BitstreamAppendPacket(BitstreamEncVideo *bitstream1, Bitstream...
function PV_STATUS (line 531) | PV_STATUS BitstreamAppendPacketNoOffset(BitstreamEncVideo *bitstream1, B...
function PV_STATUS (line 580) | PV_STATUS BitstreamRepos(BitstreamEncVideo *bitstream, Int byteCount, ...
function PV_STATUS (line 624) | PV_STATUS BitstreamFlushBits(BitstreamEncVideo *bitstream1, Int num_bit_...
function PV_STATUS (line 680) | PV_STATUS BitstreamPrependPacket(BitstreamEncVideo *bitstream1, Bitstrea...
function Int (line 759) | Int BitstreamGetPos(BitstreamEncVideo *stream)
function BitstreamEncReset (line 765) | void BitstreamEncReset(BitstreamEncVideo *stream)
function Void (line 775) | Void BitstreamSetOverrunBuffer(BitstreamEncVideo* stream, UChar* overru...
function PV_STATUS (line 786) | PV_STATUS BitstreamUseOverrunBuffer(BitstreamEncVideo* stream, Int numEx...
FILE: RtspCamera/jni/m4v_h263/enc/src/com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_encoder_NativeH263Encoder.cpp
function JNIEXPORT (line 29) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function JNIEXPORT (line 207) | JNIEXPORT jbyteArray JNICALL Java_com_orangelabs_rcs_core_ims_protocol_r...
function JNIEXPORT (line 275) | JNIEXPORT jint JNICALL Java_com_orangelabs_rcs_core_ims_protocol_rtp_cod...
function jint (line 283) | jint JNI_OnLoad(JavaVM* vm, void* reserved) {
FILE: RtspCamera/jni/m4v_h263/enc/src/combined_encode.cpp
function PV_STATUS (line 37) | PV_STATUS EncodeFrameCombinedMode(VideoEncData *video)
function PV_STATUS (line 262) | PV_STATUS EncodeSliceCombinedMode(VideoEncData *video)
function PV_STATUS (line 681) | PV_STATUS EncodeGOBHeader(VideoEncData *video, Int GOB_number, Int quant...
FILE: RtspCamera/jni/m4v_h263/enc/src/datapart_encode.cpp
function PV_STATUS (line 36) | PV_STATUS EncodeFrameDataPartMode(VideoEncData *video)
function PV_STATUS (line 211) | PV_STATUS EncodeSliceDataPartMode(VideoEncData *video)
FILE: RtspCamera/jni/m4v_h263/enc/src/dct.cpp
function Void (line 39) | Void BlockDCT_AANwSub(Short *out, UChar *cur, UChar *pred, Int width)
function Void (line 268) | Void Block4x4DCT_AANwSub(Short *out, UChar *cur, UChar *pred, Int width)
function Void (line 474) | Void Block2x2DCT_AANwSub(Short *out, UChar *cur, UChar *pred, Int width)
function Void (line 653) | Void BlockDCT_AANIntra(Short *out, UChar *cur, UChar *dummy2, Int width)
function Void (line 864) | Void Block4x4DCT_AANIntra(Short *out, UChar *cur, UChar *dummy2, Int width)
function Void (line 1051) | Void Block2x2DCT_AANIntra(Short *out, UChar *cur, UChar *dummy2, Int width)
function Block1x1DCTwSub (line 1211) | void Block1x1DCTwSub(Short *out, UChar *cur, UChar *pred, Int width)
function Block1x1DCTIntra (line 1248) | void Block1x1DCTIntra(Short *out, UChar *cur, UChar *dummy2, Int width)
FILE: RtspCamera/jni/m4v_h263/enc/src/dct_inline.h
function int32 (line 29) | __inline int32 mla724(int32 op1, int32 op2, int32 op3)
function int32 (line 40) | __inline int32 mla392(int32 k0, int32 k14, int32 round)
function int32 (line 51) | __inline int32 mla554(int32 k4, int32 k12, int32 k1)
function int32 (line 62) | __inline int32 mla1338(int32 k6, int32 k14, int32 k1)
function int32 (line 73) | __inline int32 mla946(int32 k6, int32 k14, int32 k1)
function int32 (line 84) | __inline int32 sum_abs(int32 k0, int32 k1, int32 k2, int32 k3,
function int32 (line 243) | __inline int32 sum_abs(int32 k0, int32 k1, int32 k2, int32 k3,
function int32 (line 271) | __inline int32 mla724(int32 op1, int32 op2, int32 op3)
function int32 (line 287) | __inline int32 mla392(int32 k0, int32 k14, int32 round)
function int32 (line 303) | __inline int32 mla554(int32 k4, int32 k12, int32 k1)
function int32 (line 319) | __inline int32 mla1338(int32 k6, int32 k14, int32 k1)
function int32 (line 334) | __inline int32 mla946(int32 k6, int32 k14, int32 k1)
function int32 (line 349) | __inline int32 sum_abs(int32 k0, int32 k1, int32 k2, int32 k3,
FILE: RtspCamera/jni/m4v_h263/enc/src/fastcodemb.cpp
function PV_STATUS (line 51) | PV_STATUS CodeMB_H263(VideoEncData *video, approxDCT *function, Int QP, ...
function PV_STATUS (line 280) | PV_STATUS CodeMB_MPEG(VideoEncData *video, approxDCT *function, Int QP, ...
function Int (line 485) | Int getBlockSAV(Short block[])
function Int (line 531) | Int Sad8x8(UChar *cur, UChar *prev, Int width)
function Int (line 595) | Int getBlockSum(UChar *cur, Int width)
FILE: RtspCamera/jni/m4v_h263/enc/src/fastcodemb.h
type approxDCT (line 28) | typedef struct struct_approxDCT approxDCT;
type struct_approxDCT (line 29) | struct struct_approxDCT
type QPstruct (line 42) | struct QPstruct
type QPstruct (line 68) | struct QPstruct
type QPstruct (line 72) | struct QPstruct
type QPstruct (line 76) | struct QPstruct
type QPstruct (line 79) | struct QPstruct
FILE: RtspCamera/jni/m4v_h263/enc/src/fastidct.cpp
function idct_col0 (line 43) | void idct_col0(Short *blk)
function idct_col1 (line 50) | void idct_col1(Short *blk)
function idct_col2 (line 57) | void idct_col2(Short *blk)
function idct_col3 (line 83) | void idct_col3(Short *blk)
function idct_col4 (line 118) | void idct_col4(Short *blk)
function idct_col0x40 (line 161) | void idct_col0x40(Short *blk)
function idct_col0x20 (line 187) | void idct_col0x20(Short *blk)
function idct_col0x10 (line 211) | void idct_col0x10(Short *blk)
function idct_col (line 237) | void idct_col(Short *blk)
function idct_row0Inter (line 291) | void idct_row0Inter(Short *srce, UChar *rec, Int lx)
function idct_row1Inter (line 302) | void idct_row1Inter(Short *blk, UChar *rec, Int lx)
function idct_row2Inter (line 349) | void idct_row2Inter(Short *blk, UChar *rec, Int lx)
function idct_row3Inter (line 408) | void idct_row3Inter(Short *blk, UChar *rec, Int lx)
function idct_row4Inter (line 478) | void idct_row4Inter(Short *blk, UChar *rec, Int lx)
function idct_row0x40Inter (line 554) | void idct_row0x40Inter(Short *blk, UChar *rec, Int lx)
function idct_row0x20Inter (line 611) | void idct_row0x20Inter(Short *blk, UChar *rec, Int lx)
function idct_row0x10Inter (line 667) | void idct_row0x10Inter(Short *blk, UChar *rec, Int lx)
function idct_rowInter (line 722) | void idct_rowInter(Short *blk, UChar *rec, Int lx)
function idct_row0Intra (line 813) | void idct_row0Intra(Short *srce, UChar *rec, Int lx)
function idct_row1Intra (line 824) | void idct_row1Intra(Short *blk, UChar *rec, Int lx)
function idct_row2Intra (line 845) | void idct_row2Intra(Short *blk, UChar *rec, Int lx)
function idct_row3Intra (line 900) | void idct_row3Intra(Short *blk, UChar *rec, Int lx)
function idct_row4Intra (line 966) | void idct_row4Intra(Short *blk, UChar *rec, Int lx)
function idct_row0x40Intra (line 1039) | void idct_row0x40Intra(Short *blk, UChar *rec, Int lx)
function idct_row0x20Intra (line 1094) | void idct_row0x20Intra(Short *blk, UChar *rec, Int lx)
function idct_row0x10Intra (line 1147) | void idct_row0x10Intra(Short *blk, UChar *rec, Int lx)
function idct_rowIntra (line 1199) | void idct_rowIntra(Short *blk, UChar *rec, Int lx)
function idct_row0zmv (line 1287) | void idct_row0zmv(Short *srce, UChar *rec, UChar *pred, Int lx)
function idct_row1zmv (line 1297) | void idct_row1zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function idct_row2zmv (line 1345) | void idct_row2zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function idct_row3zmv (line 1405) | void idct_row3zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function idct_row4zmv (line 1476) | void idct_row4zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function idct_row0x40zmv (line 1553) | void idct_row0x40zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function idct_row0x20zmv (line 1611) | void idct_row0x20zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function idct_row0x10zmv (line 1668) | void idct_row0x10zmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function idct_rowzmv (line 1724) | void idct_rowzmv(Short *blk, UChar *rec, UChar *pred, Int lx)
function BlockIDCTMotionComp (line 1835) | void BlockIDCTMotionComp(Short *block, UChar *bitmapcol, UChar bitmaprow,
FILE: RtspCamera/jni/m4v_h263/enc/src/fastquant.cpp
function Int (line 90) | Int cal_dc_scalerENC(Int QP, Int type)
function Int (line 149) | Int BlockQuantDequantH263Inter(Short *rcoeff, Short *qcoeff, struct QPst...
function Int (line 269) | Int BlockQuantDequantH263Intra(Short *rcoeff, Short *qcoeff, struct QPst...
function Int (line 441) | Int BlockQuantDequantH263DCInter(Short *rcoeff, Short *qcoeff, struct QP...
function Int (line 495) | Int BlockQuantDequantH263DCIntra(Short *rcoeff, Short *qcoeff, struct QP...
function Int (line 544) | Int BlockQuantDequantMPEGInter(Short *rcoeff, Short *qcoeff, Int QP, Int...
function Int (line 673) | Int BlockQuantDequantMPEGIntra(Short *rcoeff, Short *qcoeff, Int QP, Int...
function Int (line 852) | Int BlockQuantDequantMPEGDCInter(Short *rcoeff, Short *qcoeff, Int QP, I...
function Int (line 919) | Int BlockQuantDequantMPEGDCIntra(Short *rcoeff, Short *qcoeff, Int QP, I...
FILE: RtspCamera/jni/m4v_h263/enc/src/fastquant_inline.h
function int32 (line 31) | __inline int32 aan_scale(int32 q_value, int32 coeff, int32 round, int32 ...
function int32 (line 42) | __inline int32 coeff_quant(int32 coeff, int32 q_scale, int32 shift)
function int32 (line 53) | __inline int32 coeff_clip(int32 q_value, int32 ac_clip)
function int32 (line 63) | __inline int32 coeff_dequant(int32 q_value, int32 QPx2, int32 Addition, ...
function int32 (line 84) | __inline int32 smlabb(int32 q_value, int32 coeff, int32 round)
function int32 (line 91) | __inline int32 smulbb(int32 q_scale, int32 coeff)
function int32 (line 100) | __inline int32 aan_dc_scale(int32 coeff, int32 QP)
function int32 (line 109) | __inline int32 clip_2047(int32 q_value, int32 tmp)
function int32 (line 125) | __inline int32 coeff_dequant_mpeg(int32 q_value, int32 stepsize, int32 Q...
function int32 (line 150) | __inline int32 coeff_dequant_mpeg_intra(int32 q_value, int32 tmp)
function int32 (line 429) | __inline int32 aan_scale(int32 q_value, int32 coeff,
function int32 (line 450) | __inline int32 coeff_quant(int32 coeff, int32 q_scale, int32 shift)
function int32 (line 470) | __inline int32 coeff_clip(int32 q_value, int32 ac_clip)
function int32 (line 484) | __inline int32 coeff_dequant(int32 q_value, int32 QPx2, int32 Addition, ...
function int32 (line 511) | __inline int32 smlabb(int32 q_value, int32 coeff, int32 round)
function int32 (line 526) | __inline int32 smulbb(int32 q_scale, int32 coeff)
function int32 (line 539) | __inline int32 aan_dc_scale(int32 coeff, int32 QP)
function int32 (line 554) | __inline int32 clip_2047(int32 q_value, int32 tmp)
function int32 (line 568) | __inline int32 coeff_dequant_mpeg(int32 q_value, int32 stepsize, int32 Q...
function int32 (line 599) | __inline int32 coeff_dequant_mpeg_intra(int32 q_value, int32 tmp)
FILE: RtspCamera/jni/m4v_h263/enc/src/findhalfpel.cpp
function FindHalfPelMB (line 65) | void FindHalfPelMB(VideoEncData *video, UChar *cur, MOT *mot, UChar *ncand,
function Int (line 184) | Int FindHalfPelBlk(VideoEncData *video, UChar *cur, MOT *mot, Int sad16,...
FILE: RtspCamera/jni/m4v_h263/enc/src/me_utils.cpp
function ChooseMode_C (line 35) | void ChooseMode_C(UChar *Mode, UChar *cur, Int lx, Int min_SAD)
function GetHalfPelMBRegion_C (line 110) | void GetHalfPelMBRegion_C(UChar *cand, UChar *hmem, Int lx)
function GetHalfPelBlkRegion (line 166) | void GetHalfPelBlkRegion(UChar *cand, UChar *hmem, Int lx)
function PaddingEdge (line 218) | void PaddingEdge(Vop *refVop)
function ComputeMBSum_C (line 313) | void ComputeMBSum_C(UChar *cur, Int lx, MOT *mot_mb)
FILE: RtspCamera/jni/m4v_h263/enc/src/motion_comp.cpp
function getMotionCompensatedMB (line 99) | void getMotionCompensatedMB(VideoEncData *video, Int ind_x, Int ind_y, I...
function EncPrediction_INTER (line 201) | void EncPrediction_INTER(
function EncPrediction_INTER4V (line 240) | void EncPrediction_INTER4V(
function EncPrediction_Chrom (line 293) | void EncPrediction_Chrom(
function Int (line 351) | Int GetPredAdvBy0x0(
function Int (line 450) | Int GetPredAdvBy0x1(
function Int (line 775) | Int GetPredAdvBy1x0(
function Int (line 1118) | Int GetPredAdvBy1x1(
function Int (line 1521) | Int EncGetPredOutside(Int xpos, Int ypos, UChar *c_prev, UChar *rec,
function Copy_MB_from_Vop (line 1718) | void Copy_MB_from_Vop(UChar *comp, Int yChan[][NCOEFF_BLOCK], Int pitch)
function Copy_B_from_Vop (line 1766) | void Copy_B_from_Vop(UChar *comp, Int cChan[], Int pitch)
function Copy_MB_into_Vop (line 1796) | void Copy_MB_into_Vop(UChar *comp, Int yChan[][NCOEFF_BLOCK], Int pitch)
function Copy_B_into_Vop (line 1864) | void Copy_B_into_Vop(UChar *comp, Int cChan[], Int pitch)
function get_MB (line 1906) | void get_MB(UChar *c_prev, UChar *c_prev_u , UChar *c_prev_v,
function PutSkippedBlock (line 1945) | void PutSkippedBlock(UChar *rec, UChar *prev, Int lx)
FILE: RtspCamera/jni/m4v_h263/enc/src/motion_est.cpp
function MotionEstimation (line 136) | void MotionEstimation(VideoEncData *video)
function InitHTFM (line 534) | void InitHTFM(VideoEncData *video, HTFM_Stat *htfm_stat, double *newvar,...
function UpdateHTFM (line 619) | void UpdateHTFM(VideoEncData *video, double *newvar, double *exp_lamda, ...
function CalcThreshold (line 651) | void CalcThreshold(double pf, double exp_lamda[], Int nrmlz_th[])
function HTFMPrepareCurMB (line 674) | void HTFMPrepareCurMB(VideoEncData *video, HTFM_Stat *htfm_stat, UCha...
function PrepareCurMB (line 739) | void PrepareCurMB(VideoEncData *video, UChar *cur)
function MBMotionSearch (line 770) | void MBMotionSearch(VideoEncData *video, UChar *cur, UChar *best_cand[],
function Int (line 1179) | Int fullsearch(VideoEncData *video, Vol *currVol, UChar *prev, UChar *cur,
function Int (line 1266) | Int fullsearchBlk(VideoEncData *video, Vol *currVol, UChar *cent, UChar ...
function CandidateSelection (line 1352) | void CandidateSelection(Int *mvx, Int *mvy, Int *num_can, Int imb, Int jmb,
function RasterIntraUpdate (line 1573) | void RasterIntraUpdate(UChar *intraArray, UChar *Mode, Int totalMB, Int ...
function ResetIntraUpdate (line 1611) | void ResetIntraUpdate(UChar *intraArray, Int totalMB)
function ResetIntraUpdateRegion (line 1622) | void ResetIntraUpdateRegion(UChar *intraArray, Int start_i, Int rwidth,
function MoveNeighborSAD (line 1647) | void MoveNeighborSAD(Int dn[], Int new_loc)
function Int (line 1721) | Int FindMin(Int dn[])
FILE: RtspCamera/jni/m4v_h263/enc/src/mp4def.h
type UChar (line 43) | typedef unsigned char UChar;
type Char (line 44) | typedef char Char;
type UInt (line 45) | typedef unsigned int UInt;
type Int (line 46) | typedef int Int;
type UShort (line 47) | typedef unsigned short UShort;
type Short (line 48) | typedef short Short;
type SInt (line 49) | typedef short int SInt;
type Bool (line 50) | typedef unsigned int Bool;
type ULong (line 51) | typedef unsigned long ULong;
type Void (line 52) | typedef void Void;
type PV_STATUS (line 60) | typedef enum
type UChar (line 71) | typedef UChar PIXEL;
FILE: RtspCamera/jni/m4v_h263/enc/src/mp4enc_api.cpp
function OSCL_EXPORT_REF (line 158) | OSCL_EXPORT_REF Bool PVGetDefaultEncOption(VideoEncOptions *encOption, I...
function OSCL_EXPORT_REF (line 187) | OSCL_EXPORT_REF Bool PVInitVideoEncoder(VideoEncControls *encoderCont...
function OSCL_EXPORT_REF (line 1072) | OSCL_EXPORT_REF Bool PVCleanUpVideoEncoder(VideoEncControls *encoderC...
function OSCL_EXPORT_REF (line 1276) | OSCL_EXPORT_REF Bool PVGetVolHeader(VideoEncControls *encCtrl, UChar *vo...
function OSCL_EXPORT_REF (line 1319) | OSCL_EXPORT_REF UChar* PVGetOverrunBuffer(VideoEncControls *encCtrl)
function OSCL_EXPORT_REF (line 1347) | OSCL_EXPORT_REF Bool PVEncodeVideoFrame(VideoEncControls *encCtrl, Video...
function OSCL_EXPORT_REF (line 1647) | OSCL_EXPORT_REF Bool PVEncodeFrameSet(VideoEncControls *encCtrl, VideoEn...
function OSCL_EXPORT_REF (line 1839) | OSCL_EXPORT_REF Bool PVEncodeSlice(VideoEncControls *encCtrl, UChar *bst...
function OSCL_EXPORT_REF (line 1977) | OSCL_EXPORT_REF Bool PVGetH263ProfileLevelID(VideoEncControls *encCtrl, ...
function OSCL_EXPORT_REF (line 2047) | OSCL_EXPORT_REF Bool PVGetMPEG4ProfileLevelID(VideoEncControls *encCtrl,...
function OSCL_EXPORT_REF (line 2092) | OSCL_EXPORT_REF Bool PVUpdateEncFrameRate(VideoEncControls *encCtrl, flo...
function OSCL_EXPORT_REF (line 2133) | OSCL_EXPORT_REF Bool PVUpdateBitRate(VideoEncControls *encCtrl, Int *bit...
function Bool (line 2171) | Bool PVUpdateVBVDelay(VideoEncControls *encCtrl, float delay)
function OSCL_EXPORT_REF (line 2212) | OSCL_EXPORT_REF Bool PVUpdateIFrameInterval(VideoEncControls *encCtrl, I...
function OSCL_EXPORT_REF (line 2237) | OSCL_EXPORT_REF Bool PVUpdateNumIntraMBRefresh(VideoEncControls *encC...
function OSCL_EXPORT_REF (line 2262) | OSCL_EXPORT_REF Bool PVIFrameRequest(VideoEncControls *encCtrl)
function OSCL_EXPORT_REF (line 2288) | OSCL_EXPORT_REF Int PVGetEncMemoryUsage(VideoEncControls *encCtrl)
function OSCL_EXPORT_REF (line 2312) | OSCL_EXPORT_REF Bool PVGetHintTrack(VideoEncControls *encCtrl, MP4HintTr...
function OSCL_EXPORT_REF (line 2340) | OSCL_EXPORT_REF Bool PVGetMaxVideoFrameSize(VideoEncControls *encCtrl, I...
function OSCL_EXPORT_REF (line 2376) | OSCL_EXPORT_REF Bool PVGetVBVSize(VideoEncControls *encCtrl, Int *VBVSize)
function PV_STATUS (line 2404) | PV_STATUS EncodeVOS_Start(VideoEncControls *encoderControl)
function PV_STATUS (line 2569) | PV_STATUS VOS_End(VideoEncControls *encoderControl)
function Int (line 2593) | Int DetermineCodingLayer(VideoEncData *video, Int *nLayer, ULong modTime)
function DetermineVopType (line 2758) | void DetermineVopType(VideoEncData *video, Int currLayer)
function Int (line 2828) | Int UpdateSkipNextFrame(VideoEncData *video, ULong *modTime, Int *size, ...
function Bool (line 2962) | Bool SetProfile_BufferSize(VideoEncData *video, float delay, Int bInitia...
FILE: RtspCamera/jni/m4v_h263/enc/src/mp4enc_lib.h
type QPstruct (line 65) | struct QPstruct
type QPstruct (line 69) | struct QPstruct
type QPstruct (line 73) | struct QPstruct
type QPstruct (line 76) | struct QPstruct
FILE: RtspCamera/jni/m4v_h263/enc/src/mp4lib_int.h
type BitstreamEncVideo (line 29) | typedef struct tagBitstream
type Vop (line 42) | typedef struct tagVOP
type Vol (line 76) | typedef struct tagVol
type MacroBlock (line 132) | typedef struct tagMacroBlock
type RunLevelBlock (line 139) | typedef struct tagRunLevelBlock
type HeaderInfoEncVideo (line 146) | typedef struct tagHeaderInfoDecVideo
type Short (line 152) | typedef Short typeDCStore[6];
type Short (line 153) | typedef Short typeDCACStore[4][8];
type MOT (line 155) | typedef struct tagMOT
type HintTrackInfo (line 162) | typedef struct tagHintTrackInfo
type VideoEncParams (line 172) | typedef struct tagVideoEncParams
type FuncPtr (line 236) | typedef struct tagFuncPtr
type RDInfo (line 254) | typedef struct tagRDInfo
type MultiPass (line 262) | typedef struct tagMultiPass
type HTFM_Stat (line 309) | typedef struct tagHTFM_Stat
type VideoEncData (line 319) | typedef struct tagVideoEncData
type VLCtable (line 441) | typedef struct tagVLCtable
type approxDCT (line 451) | typedef struct struct_approxDCT approxDCT;
type struct_approxDCT (line 452) | struct struct_approxDCT
type QPstruct (line 463) | struct QPstruct
FILE: RtspCamera/jni/m4v_h263/enc/src/pvm4vencoder.cpp
function OSCL_EXPORT_REF (line 34) | OSCL_EXPORT_REF CPVM4VEncoder::~CPVM4VEncoder()
function OSCL_EXPORT_REF (line 45) | OSCL_EXPORT_REF CPVM4VEncoder* CPVM4VEncoder::New(int32 aThreadId)
function OSCL_EXPORT_REF (line 84) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::SetObserver(MPVCVEIObserver ...
function OSCL_EXPORT_REF (line 91) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::AddBuffer(TPVVideoOutputData...
function OSCL_EXPORT_REF (line 105) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::Encode(TPVVideoInputData *aV...
function TCVEI_RETVAL (line 253) | TCVEI_RETVAL CPVM4VEncoder::ParseFSI(uint8* aFSIBuff, int FSILength, Vid...
function int16 (line 535) | int16 CPVM4VEncoder::iDecodeShortHeader(mp4StreamType *psBits, VideoEncO...
function int16 (line 615) | int16 CPVM4VEncoder::ShowBits(
function int16 (line 658) | int16 CPVM4VEncoder::FlushBits(
function int16 (line 689) | int16 CPVM4VEncoder::ReadBits(
function int16 (line 727) | int16 CPVM4VEncoder::ByteAlign(
function OSCL_EXPORT_REF (line 767) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::Initialize(TPVVideoInputForm...
function OSCL_EXPORT_REF (line 993) | OSCL_EXPORT_REF int32 CPVM4VEncoder::GetBufferSize()
function OSCL_EXPORT_REF (line 1004) | OSCL_EXPORT_REF int32 CPVM4VEncoder::GetEncodeWidth(int32 aLayer)
function OSCL_EXPORT_REF (line 1009) | OSCL_EXPORT_REF int32 CPVM4VEncoder::GetEncodeHeight(int32 aLayer)
function OSCL_EXPORT_REF (line 1014) | OSCL_EXPORT_REF float CPVM4VEncoder::GetEncodeFrameRate(int32 aLayer)
function OSCL_EXPORT_REF (line 1018) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::GetVolHeader(uint8 *volHeade...
function OSCL_EXPORT_REF (line 1039) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::EncodeFrame(TPVVideoInputDat...
function OSCL_EXPORT_REF (line 1183) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::FlushOutput(TPVVideoOutputDa...
function TCVEI_RETVAL (line 1190) | TCVEI_RETVAL CPVM4VEncoder::Terminate()
function OSCL_EXPORT_REF (line 1209) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::UpdateBitRate(int32 aNumLaye...
function OSCL_EXPORT_REF (line 1227) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::UpdateFrameRate(int32 aNumLa...
function OSCL_EXPORT_REF (line 1241) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::UpdateIFrameInterval(int32 a...
function OSCL_EXPORT_REF (line 1252) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::IFrameRequest()
function OSCL_EXPORT_REF (line 1263) | OSCL_EXPORT_REF TCVEI_RETVAL CPVM4VEncoder::SetIntraMBRefresh(int32 aNum...
FILE: RtspCamera/jni/m4v_h263/enc/src/rate_control.cpp
function PV_STATUS (line 50) | PV_STATUS RC_Initialize(void *input)
function PV_STATUS (line 156) | PV_STATUS RC_Cleanup(rateControl *rc[], Int numLayers)
function PV_STATUS (line 183) | PV_STATUS RC_VopQPSetting(VideoEncData *video, rateControl *prc[])
function Void (line 271) | Void SaveRDSamples(MultiPass *pMP, Int counter_samples)
function PV_STATUS (line 291) | PV_STATUS RC_VopUpdateStat(VideoEncData *video, rateControl *rc)
function Int (line 360) | Int RC_GetSkipNextFrame(VideoEncData *video, Int currLayer)
function RC_ResetSkipNextFrame (line 365) | void RC_ResetSkipNextFrame(VideoEncData *video, Int currLayer)
function PV_STATUS (line 383) | PV_STATUS RC_UpdateBuffer(VideoEncData *video, Int currLayer, Int num_skip)
function PV_STATUS (line 416) | PV_STATUS RC_UpdateBXRCParams(void *input)
function targetBitCalculation (line 529) | void targetBitCalculation(void *input)
function calculateQuantizer_Multipass (line 699) | void calculateQuantizer_Multipass(void *input)
function updateRateControl (line 808) | void updateRateControl(rateControl *rc, VideoEncData *video)
function updateRC_PostProc (line 857) | void updateRC_PostProc(rateControl *rc, VideoEncData *video)
FILE: RtspCamera/jni/m4v_h263/enc/src/rate_control.h
type dataPointArray (line 23) | typedef struct tagdataPointArray
type rateControl (line 33) | typedef struct
FILE: RtspCamera/jni/m4v_h263/enc/src/sad.cpp
function Int (line 79) | Int SAD_Macroblock_C(UChar *ref, UChar *blk, Int dmin_lx, void *extra_info)
function Int (line 107) | Int SAD_MB_HTFM_Collect(UChar *ref, UChar *blk, Int dmin_lx, void *extra...
function Int (line 209) | Int SAD_MB_HTFM(UChar *ref, UChar *blk, Int dmin_lx, void *extra_info)
function Int (line 315) | Int SAD_Block_C(UChar *ref, UChar *blk, Int dmin, Int lx, void *)
FILE: RtspCamera/jni/m4v_h263/enc/src/sad_halfpel.cpp
function Int (line 65) | Int HalfPel1_SAD_MB(UChar *ref, UChar *blk, Int dmin, Int width, Int ih,...
function Int (line 97) | Int HalfPel2_SAD_MB(UChar *ref, UChar *blk, Int dmin, Int width)
function Int (line 139) | Int HalfPel1_SAD_Blk(UChar *ref, UChar *blk, Int dmin, Int width, Int ih...
function Int (line 171) | Int HalfPel2_SAD_Blk(UChar *ref, UChar *blk, Int dmin, Int width)
function Int (line 222) | Int SAD_MB_HalfPel_Cxhyh(UChar *ref, UChar *blk, Int dmin_rx, void *extr...
function Int (line 263) | Int SAD_MB_HalfPel_Cyh(UChar *ref, UChar *blk, Int dmin_rx, void *extra_...
function Int (line 299) | Int SAD_MB_HalfPel_Cxh(UChar *ref, UChar *blk, Int dmin_rx, void *extra_...
function Int (line 336) | Int SAD_MB_HP_HTFM_Collectxhyh(UChar *ref, UChar *blk, Int dmin_rx, void...
function Int (line 415) | Int SAD_MB_HP_HTFM_Collectyh(UChar *ref, UChar *blk, Int dmin_rx, void *...
function Int (line 493) | Int SAD_MB_HP_HTFM_Collectxh(UChar *ref, UChar *blk, Int dmin_rx, void *...
function Int (line 570) | Int SAD_MB_HP_HTFMxhyh(UChar *ref, UChar *blk, Int dmin_rx, void *extra_...
function Int (line 638) | Int SAD_MB_HP_HTFMyh(UChar *ref, UChar *blk, Int dmin_rx, void *extra_info)
function Int (line 704) | Int SAD_MB_HP_HTFMxh(UChar *ref, UChar *blk, Int dmin_rx, void *extra_info)
function Int (line 781) | Int SAD_Blk_HalfPel_C(UChar *ref, UChar *blk, Int dmin, Int width, Int r...
FILE: RtspCamera/jni/m4v_h263/enc/src/sad_halfpel_inline.h
function int32 (line 34) | __inline int32 INTERP1_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 43) | __inline int32 INTERP2_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 54) | __inline int32 INTERP1_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 66) | __inline int32 INTERP2_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 81) | __inline int32 INTERP1_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 101) | __inline int32 INTERP2_SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
FILE: RtspCamera/jni/m4v_h263/enc/src/sad_inline.h
function int32 (line 33) | __inline int32 SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 42) | __inline int32 sad_4pixel(int32 src1, int32 src2, int32 mask)
function int32 (line 82) | __inline int32 simd_sad_mb(UChar *ref, UChar *blk, Int dmin, Int lx)
function int32 (line 177) | __inline int32 SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 189) | __inline int32 sad_4pixel(int32 src1, int32 src2, int32 mask)
function int32 (line 208) | __inline int32 sad_4pixelN(int32 src1, int32 src2, int32 mask)
function int32 (line 257) | __inline int32 simd_sad_mb(UChar *ref, UChar *blk, Int dmin, Int lx)
function int32 (line 346) | __inline int32 SUB_SAD(int32 sad, int32 tmp, int32 tmp2)
function int32 (line 365) | __inline int32 sad_4pixel(int32 src1, int32 src2, int32 mask)
function int32 (line 390) | __inline int32 sad_4pixelN(int32 src1, int32 src2, int32 mask)
function int32 (line 446) | __inline int32 simd_sad_mb(UChar *ref, UChar *blk, Int dmin, Int lx)
FILE: RtspCamera/jni/m4v_h263/enc/src/sad_mb_offset.h
function int32 (line 31) | __inline int32 sad_mb_offset1(UChar *ref, UChar *blk, Int lx, Int dmin)
function int32 (line 134) | __inline int32 sad_mb_offset1(UChar *ref, UChar *blk, Int lx, Int dmin, ...
function int32 (line 227) | __inline int32 sad_mb_offset1(UChar *ref, UChar *blk, Int lx, Int dmin)
FILE: RtspCamera/jni/m4v_h263/enc/src/vlc_encode.cpp
function Int (line 181) | Int
function Int (line 196) | Int
function Int (line 210) | Int
function Int (line 238) | Int
function Int (line 253) | Int
function Int (line 268) | Int
function Int (line 291) | Int PutCoeff_Inter(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 311) | Int PutCoeff_Inter_Last(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 333) | Int PutCoeff_Intra(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 365) | Int PutCoeff_Intra_Last(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 393) | Int PutCoeff_Inter_RVLC(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 449) | Int PutCoeff_Inter_RVLC_Last(Int run, Int level, BitstreamEncVideo *bits...
function Int (line 483) | Int PutCoeff_Intra_RVLC(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 545) | Int PutCoeff_Intra_RVLC_Last(Int run, Int level, BitstreamEncVideo *bits...
function Int (line 579) | Int
function Int (line 609) | Int PutRunCoeff_Inter_Last(Int run, Int level, BitstreamEncVideo *bitstr...
function Int (line 638) | Int PutRunCoeff_Intra(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 688) | Int PutRunCoeff_Intra_Last(Int run, Int level, BitstreamEncVideo *bitstr...
function Int (line 728) | Int
function Int (line 756) | Int PutLevelCoeff_Inter_Last(Int run, Int level, BitstreamEncVideo *bits...
function Int (line 783) | Int PutLevelCoeff_Intra(Int run, Int level, BitstreamEncVideo *bitstream)
function Int (line 829) | Int PutLevelCoeff_Intra_Last(Int run, Int level, BitstreamEncVideo *bits...
function MBVlcEncodeDataPar_I_VOP (line 881) | void MBVlcEncodeDataPar_I_VOP(
function MBVlcEncodeDataPar_P_VOP (line 969) | void MBVlcEncodeDataPar_P_VOP(
function MBVlcEncodeCombined_I_VOP (line 1127) | void MBVlcEncodeCombined_I_VOP(
function MBVlcEncodeCombined_P_VOP (line 1243) | void MBVlcEncodeCombined_P_VOP(
function Void (line 1432) | Void BlockCodeCoeff_RVLC(RunLevelBlock *RLB, BitstreamEncVideo *bs, Int ...
function Void (line 1504) | Void BlockCodeCoeff_ShortHeader(RunLevelBlock *RLB, BitstreamEncVideo *b...
function Void (line 1573) | Void BlockCodeCoeff_Normal(RunLevelBlock *RLB, BitstreamEncVideo *bs, In...
function RunLevel (line 1785) | void RunLevel(VideoEncData *video, Int intra, Int intraDC_decision, Int ...
function Bool (line 1926) | static Bool IntraDCSwitch_Decision(Int Mode, Int intra_dc_vlc_thr, Int i...
function Int (line 1944) | Int IntraDC_dpcm(Int val, Int lum, BitstreamEncVideo *bitstream)
function Void (line 2014) | Void DCACPred(VideoEncData *video, UChar Mode, Int *intraDC_decision, In...
function Void (line 2532) | Void find_pmvs(VideoEncData *video, Int block, Int *mvx, Int *mvy)
function Void (line 2732) | Void WriteMVcomponent(Int f_code, Int dmv, BitstreamEncVideo *bs)
function Void (line 2754) | Void
FILE: RtspCamera/jni/m4v_h263/enc/src/vlc_encode_inline.h
function Int (line 23) | __inline Int zero_run_search(UInt *bitmapzz, Short *dataBlock, RunLevel...
function Int (line 214) | __inline Int m4v_enc_clz(UInt temp)
function Int (line 227) | __inline Int zero_run_search(UInt *bitmapzz, Short *dataBlock, RunLevel...
FILE: RtspCamera/jni/m4v_h263/enc/src/vop.cpp
function PV_STATUS (line 39) | PV_STATUS EncodeVop(VideoEncData *video)
function PV_STATUS (line 124) | PV_STATUS EncodeVop_NoME(VideoEncData *video)
function PV_STATUS (line 276) | PV_STATUS EncodeGOVHeader(BitstreamEncVideo *stream, UInt seconds)
function PV_STATUS (line 309) | PV_STATUS EncodeVopNotCoded(VideoEncData *video, UChar *bstream, Int *si...
function PV_STATUS (line 353) | PV_STATUS EncodeVOPHeader(BitstreamEncVideo *stream, Vol *currVol, Vop *...
function PV_STATUS (line 412) | PV_STATUS EncodeShortHeader(BitstreamEncVideo *stream, Vop *currVop)
function PV_STATUS (line 504) | PV_STATUS EncodeVideoPacketHeader(VideoEncData *video, int MB_number,
FILE: RtspCamera/src/com/orangelabs/rcs/core/CoreException.java
class CoreException (line 26) | public class CoreException extends java.lang.Exception {
method CoreException (line 34) | public CoreException(String error) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/CodecChain.java
class CodecChain (line 31) | public class CodecChain {
method CodecChain (line 57) | public CodecChain(Codec[] codecs, ProcessorOutputStream renderer) {
method process (line 82) | public int process(Buffer input) {
method doProcess (line 94) | private int doProcess(int codecNo, Buffer input) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/MediaRegistry.java
class MediaRegistry (line 38) | public class MediaRegistry {
method getSupportedVideoFormats (line 55) | public static Vector<VideoFormat> getSupportedVideoFormats() {
method getSupportedAudioFormats (line 71) | public static Vector<AudioFormat> getSupportedAudioFormats() {
method generateFormat (line 88) | public static Format generateFormat(String codec) {
method isCodecSupported (line 98) | public static boolean isCodecSupported(String codec) {
method generateEncodingCodecChain (line 109) | public static Codec[] generateEncodingCodecChain(String encoding) {
method generateDecodingCodecChain (line 128) | public static Codec[] generateDecodingCodecChain(String encoding) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/MediaRtpReceiver.java
class MediaRtpReceiver (line 33) | public class MediaRtpReceiver {
method MediaRtpReceiver (line 59) | public MediaRtpReceiver(int localPort) {
method prepareSession (line 70) | public void prepareSession(MediaOutput renderer, Format format)
method startSession (line 107) | public void startSession() {
method stopSession (line 121) | public void stopSession() {
method getInputStream (line 137) | public RtpInputStream getInputStream() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/Processor.java
class Processor (line 33) | public class Processor extends Thread {
method Processor (line 71) | public Processor(ProcessorInputStream inputStream, ProcessorOutputStre...
method startProcessing (line 88) | public void startProcessing() {
method stopProcessing (line 100) | public void stopProcessing() {
method run (line 114) | public void run() {
method getInputStream (line 178) | public ProcessorInputStream getInputStream() {
method getOutputStream (line 187) | public ProcessorOutputStream getOutputStream() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/RtpException.java
class RtpException (line 26) | public class RtpException extends java.lang.Exception {
method RtpException (line 34) | public RtpException(String error) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/Codec.java
class Codec (line 29) | public abstract class Codec {
method setInputFormat (line 67) | public Format setInputFormat(Format input) {
method setOutputFormat (line 78) | public Format setOutputFormat(Format output) {
method getInputFormat (line 88) | public Format getInputFormat() {
method getOutputFormat (line 97) | public Format getOutputFormat() {
method reset (line 104) | public void reset() {
method open (line 110) | public void open() {
method close (line 116) | public void close() {
method isEOM (line 124) | protected boolean isEOM(Buffer inputBuffer) {
method propagateEOM (line 133) | protected void propagateEOM(Buffer outputBuffer) {
method updateOutput (line 146) | protected void updateOutput(Buffer outputBuffer, Format format, int le...
method checkInputBuffer (line 159) | protected boolean checkInputBuffer(Buffer inputBuffer) {
method validateByteArraySize (line 170) | protected byte[] validateByteArraySize(Buffer buffer, int newSize) {
method process (line 195) | public abstract int process(Buffer input, Buffer output);
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/VideoCodec.java
class VideoCodec (line 28) | public abstract class VideoCodec extends Codec {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/H263Config.java
class H263Config (line 26) | public class H263Config {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/H263RtpHeader.java
class H263RtpHeader (line 30) | public class H263RtpHeader{
method H263RtpHeader (line 49) | public H263RtpHeader(final byte RR, final boolean P, final boolean V, ...
method H263RtpHeader (line 62) | public H263RtpHeader(byte[] data){
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/JavaDepacketizer.java
class JavaDepacketizer (line 29) | public class JavaDepacketizer extends VideoCodec {
method JavaDepacketizer (line 45) | public JavaDepacketizer(){
method process (line 55) | public int process(Buffer input, Buffer output){
class FrameAssembler (line 76) | static class FrameAssembler{
method put (line 85) | public void put(Buffer buffer){
method complete (line 137) | public boolean complete(){
method copyToBuffer (line 151) | private void copyToBuffer(Buffer bDest){
method getTimeStamp (line 176) | public long getTimeStamp(){
class FrameAssemblerCollection (line 186) | static class FrameAssemblerCollection{
method put (line 195) | public void put(Buffer buffer){
method getLastActiveAssembler (line 205) | public FrameAssembler getLastActiveAssembler(){
method createNewAssembler (line 215) | public int createNewAssembler(long timeStamp){
method getAssembler (line 252) | public int getAssembler(long timeStamp){
method removeOldestThan (line 273) | public void removeOldestThan(long timeStamp){
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/JavaPacketizer.java
class JavaPacketizer (line 27) | public class JavaPacketizer extends VideoCodec {
method JavaPacketizer (line 33) | public JavaPacketizer(){
method process (line 36) | public int process(Buffer input, Buffer output){
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/decoder/NativeH263Decoder.java
class NativeH263Decoder (line 25) | public class NativeH263Decoder
method NativeH263Decoder (line 28) | public NativeH263Decoder()
method InitDecoder (line 32) | public static native int InitDecoder(int i, int j);
method DeinitDecoder (line 34) | public static native int DeinitDecoder();
method DecodeAndConvert (line 36) | public static native int DecodeAndConvert(byte abyte0[], int ai[], lon...
method InitParser (line 38) | public static native int InitParser(String s);
method DeinitParser (line 40) | public static native int DeinitParser();
method getVideoLength (line 42) | public static native int getVideoLength();
method getVideoWidth (line 44) | public static native int getVideoWidth();
method getVideoHeight (line 46) | public static native int getVideoHeight();
method getVideoCoding (line 48) | public static native String getVideoCoding();
method getVideoSample (line 50) | public static native VideoSample getVideoSample(int ai[]);
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/decoder/VideoSample.java
class VideoSample (line 22) | public class VideoSample
method VideoSample (line 28) | public VideoSample(byte data[], int timestamp)
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/encoder/NativeH263Encoder.java
class NativeH263Encoder (line 25) | public class NativeH263Encoder
method NativeH263Encoder (line 28) | public NativeH263Encoder()
method InitEncoder (line 32) | public static native int InitEncoder(NativeH263EncoderParams nativeh26...
method EncodeFrame (line 34) | public static native byte[] EncodeFrame(byte abyte0[], long l);
method DeinitEncoder (line 36) | public static native int DeinitEncoder();
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h263/encoder/NativeH263EncoderParams.java
class NativeH263EncoderParams (line 22) | public class NativeH263EncoderParams
method NativeH263EncoderParams (line 76) | public NativeH263EncoderParams()
method getEncMode (line 105) | public int getEncMode()
method getPacketSize (line 110) | public int getPacketSize()
method getProfile_level (line 115) | public int getProfile_level()
method isRvlcEnable (line 120) | public boolean isRvlcEnable()
method getGobHeaderInterval (line 125) | public int getGobHeaderInterval()
method getNumLayers (line 130) | public int getNumLayers()
method getTimeIncRes (line 135) | public int getTimeIncRes()
method getTickPerSrc (line 140) | public int getTickPerSrc()
method getEncHeight (line 145) | public int getEncHeight()
method getEncWidth (line 150) | public int getEncWidth()
method getEncFrameRate (line 155) | public float getEncFrameRate()
method getBitRate (line 160) | public int getBitRate()
method getIQuant (line 165) | public int getIQuant()
method getPQuant (line 170) | public int getPQuant()
method getQuantType (line 175) | public int getQuantType()
method getRcType (line 180) | public int getRcType()
method isNoFrameSkipped (line 185) | public boolean isNoFrameSkipped()
method getIntraPeriod (line 190) | public int getIntraPeriod()
method getNumIntraMB (line 195) | public int getNumIntraMB()
method isSceneDetect (line 200) | public boolean isSceneDetect()
method getSearchRange (line 205) | public int getSearchRange()
method isMv8x8Enable (line 210) | public boolean isMv8x8Enable()
method getIntraDCVlcTh (line 215) | public int getIntraDCVlcTh()
method isUseACPred (line 220) | public boolean isUseACPred()
method setEncMode (line 225) | public void setEncMode(int encMode)
method setPacketSize (line 230) | public void setPacketSize(int packetSize)
method setProfile_level (line 235) | public void setProfile_level(int profile_level)
method setRvlcEnable (line 240) | public void setRvlcEnable(boolean rvlcEnable)
method setGobHeaderInterval (line 245) | public void setGobHeaderInterval(int gobHeaderInterval)
method setNumLayers (line 250) | public void setNumLayers(int numLayers)
method setTimeIncRes (line 255) | public void setTimeIncRes(int timeIncRes)
method setTickPerSrc (line 260) | public void setTickPerSrc(int tickPerSrc)
method setEncHeight (line 265) | public void setEncHeight(int encHeight)
method setEncWidth (line 270) | public void setEncWidth(int encWidth)
method setEncFrameRate (line 275) | public void setEncFrameRate(float encFrameRate)
method setBitRate (line 280) | public void setBitRate(int bitRate)
method setIQuant (line 285) | public void setIQuant(int quant)
method setPQuant (line 290) | public void setPQuant(int quant)
method setQuantType (line 295) | public void setQuantType(int quantType)
method setRcType (line 300) | public void setRcType(int rcType)
method setNoFrameSkipped (line 305) | public void setNoFrameSkipped(boolean noFrameSkipped)
method setIntraPeriod (line 310) | public void setIntraPeriod(int intraPeriod)
method setNumIntraMB (line 315) | public void setNumIntraMB(int numIntraMB)
method setSceneDetect (line 320) | public void setSceneDetect(boolean sceneDetect)
method setSearchRange (line 325) | public void setSearchRange(int searchRange)
method setMv8x8Enable (line 330) | public void setMv8x8Enable(boolean mv8x8Enable)
method setIntraDCVlcTh (line 335) | public void setIntraDCVlcTh(int intraDCVlcTh)
method setUseACPred (line 340) | public void setUseACPred(boolean useACPred)
method getVbvDelay (line 345) | public float getVbvDelay()
method setVbvDelay (line 350) | public void setVbvDelay(float vbvDelay)
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h264/H264Config.java
class H264Config (line 26) | public class H264Config {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h264/decoder/NativeH264Decoder.java
class NativeH264Decoder (line 23) | public class NativeH264Decoder
method NativeH264Decoder (line 26) | public NativeH264Decoder()
method InitDecoder (line 30) | public static native int InitDecoder();
method DeinitDecoder (line 32) | public static native int DeinitDecoder();
method DecodeAndConvert (line 34) | public static synchronized native int DecodeAndConvert(byte abyte0[], ...
method InitParser (line 36) | public static native int InitParser(String s);
method DeinitParser (line 38) | public static native int DeinitParser();
method getVideoLength (line 40) | public static native int getVideoLength();
method getVideoWidth (line 42) | public static native int getVideoWidth();
method getVideoHeight (line 44) | public static native int getVideoHeight();
method getVideoCoding (line 46) | public static native String getVideoCoding();
method getVideoSample (line 48) | public static native VideoSample getVideoSample(int ai[]);
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/codec/video/h264/encoder/NativeH264Encoder.java
class NativeH264Encoder (line 22) | public class NativeH264Encoder
method NativeH264Encoder (line 25) | public NativeH264Encoder()
method InitEncoder (line 29) | public static native int InitEncoder(int i, int j, int k);
method EncodeFrame (line 31) | public static native byte[] EncodeFrame(byte abyte0[], long l);
method DeinitEncoder (line 33) | public static native int DeinitEncoder();
method getLastEncodeStatus (line 35) | public static native int getLastEncodeStatus();
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpAppPacket.java
class RtcpAppPacket (line 29) | public class RtcpAppPacket extends RtcpPacket {
method RtcpAppPacket (line 34) | public RtcpAppPacket(RtcpPacket parent) {
method RtcpAppPacket (line 40) | public RtcpAppPacket(int ssrc, int name, int subtype, byte data[]) {
method calcLength (line 57) | public int calcLength() {
method assemble (line 61) | public void assemble(DataOutputStream out) throws IOException {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpByePacket.java
class RtcpByePacket (line 29) | public class RtcpByePacket extends RtcpPacket {
method RtcpByePacket (line 34) | public RtcpByePacket(RtcpPacket parent) {
method RtcpByePacket (line 39) | public RtcpByePacket(int ssrc[], byte reason[]) {
method calcLength (line 53) | public int calcLength() {
method assemble (line 58) | public void assemble(DataOutputStream out) throws IOException {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpCompoundPacket.java
class RtcpCompoundPacket (line 32) | public class RtcpCompoundPacket extends RtcpPacket {
method RtcpCompoundPacket (line 35) | public RtcpCompoundPacket(Packet packet) {
method RtcpCompoundPacket (line 40) | public RtcpCompoundPacket(RtcpPacket[] rtcppackets) {
method assemble (line 45) | public void assemble(int i, boolean bool) {
method assemble (line 80) | public void assemble(DataOutputStream dataoutputstream) throws IOExcep...
method calcLength (line 84) | public int calcLength() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpPacket.java
class RtcpPacket (line 30) | public abstract class RtcpPacket extends Packet {
method RtcpPacket (line 75) | public RtcpPacket() {
method RtcpPacket (line 78) | public RtcpPacket(RtcpPacket rtcppacket) {
method RtcpPacket (line 84) | public RtcpPacket(Packet packet) {
method assemble (line 90) | public abstract void assemble(DataOutputStream dataoutputstream) throw...
method calcLength (line 92) | public abstract int calcLength();
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpPacketReceiver.java
class RtcpPacketReceiver (line 43) | public class RtcpPacketReceiver extends Thread {
method RtcpPacketReceiver (line 76) | public RtcpPacketReceiver(int port, RtcpSession rtcpSession) throws IO...
method close (line 95) | public void close() throws IOException {
method run (line 111) | public void run() {
method handlePacket (line 140) | public RtcpPacket handlePacket(Packet p) {
method parseRtcpPacket (line 162) | public RtcpPacket parseRtcpPacket(Packet packet) {
method addRtcpListener (line 462) | public void addRtcpListener(RtcpEventListener listener) {
method removeRtcpListener (line 474) | public void removeRtcpListener(RtcpEventListener listener) {
method notifyRtcpListeners (line 486) | public void notifyRtcpListeners(RtcpEvent event) {
method getRtcpReceptionStats (line 498) | public RtcpStatisticsReceiver getRtcpReceptionStats() {
method getConnection (line 507) | public DatagramConnection getConnection() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpPacketTransmitter.java
class RtcpPacketTransmitter (line 34) | public class RtcpPacketTransmitter extends Thread {
method RtcpPacketTransmitter (line 88) | public RtcpPacketTransmitter(String address, int port, RtcpSession rtc...
method RtcpPacketTransmitter (line 114) | public RtcpPacketTransmitter(String address, int port, RtcpSession rtc...
method close (line 140) | public void close() throws IOException {
method run (line 155) | public void run() {
method assembleRtcpPacket (line 214) | private byte[] assembleRtcpPacket() {
method assembleSenderReportPacket (line 247) | private byte[] assembleSenderReportPacket() {
method assembleReceiverReportPacket (line 290) | private byte[] assembleReceiverReportPacket() {
method assembleRTCPReceptionReport (line 320) | private byte[] assembleRTCPReceptionReport() {
method sendByePacket (line 347) | public void sendByePacket() {
method makereports (line 372) | public Vector<RtcpSdesPacket> makereports() {
method transmit (line 393) | private void transmit(RtcpCompoundPacket packet) {
method transmit (line 421) | private void transmit(byte packet[]) {
method getStatistics (line 442) | public RtcpStatisticsTransmitter getStatistics() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpPacketUtils.java
class RtcpPacketUtils (line 26) | public class RtcpPacketUtils {
method longToBytes (line 35) | public static byte[] longToBytes(long data, int n) {
method append (line 51) | public static byte[] append(byte[] pck1, byte[] pck2) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpReceiverReportPacket.java
class RtcpReceiverReportPacket (line 29) | public class RtcpReceiverReportPacket extends RtcpPacket {
method RtcpReceiverReportPacket (line 33) | public RtcpReceiverReportPacket(int i, RtcpReport[] rtcpreportblocks) {
method RtcpReceiverReportPacket (line 40) | public RtcpReceiverReportPacket(RtcpPacket rtcppacket) {
method assemble (line 45) | public void assemble(DataOutputStream dataoutputstream) throws IOExcep...
method calcLength (line 61) | public int calcLength() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpReport.java
class RtcpReport (line 26) | public class RtcpReport {
method getDLSR (line 36) | public long getDLSR() {
method getFractionLost (line 40) | public int getFractionLost() {
method getJitter (line 44) | public long getJitter() {
method getLSR (line 48) | public long getLSR() {
method getNumLost (line 52) | public long getNumLost() {
method getSSRC (line 56) | public long getSSRC() {
method getXtndSeqNum (line 60) | public long getXtndSeqNum() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpSdesBlock.java
class RtcpSdesBlock (line 26) | public class RtcpSdesBlock {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpSdesItem.java
class RtcpSdesItem (line 26) | public class RtcpSdesItem {
method RtcpSdesItem (line 30) | public RtcpSdesItem() {
method RtcpSdesItem (line 33) | public RtcpSdesItem(int i, String string) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpSdesPacket.java
class RtcpSdesPacket (line 29) | public class RtcpSdesPacket extends RtcpPacket {
method RtcpSdesPacket (line 33) | public RtcpSdesPacket(RtcpPacket parent) {
method RtcpSdesPacket (line 38) | public RtcpSdesPacket(RtcpSdesBlock sdes[]) {
method calcLength (line 47) | public int calcLength() {
method assemble (line 62) | public void assemble(DataOutputStream out) throws IOException {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpSenderReportPacket.java
class RtcpSenderReportPacket (line 29) | public class RtcpSenderReportPacket extends RtcpPacket {
method RtcpSenderReportPacket (line 38) | public RtcpSenderReportPacket(int i, RtcpReport[] rtcpreportblocks) {
method RtcpSenderReportPacket (line 45) | public RtcpSenderReportPacket(RtcpPacket rtcppacket) {
method assemble (line 50) | public void assemble(DataOutputStream dataoutputstream) throws IOExcep...
method calcLength (line 71) | public int calcLength() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpSession.java
class RtcpSession (line 29) | public class RtcpSession {
method RtcpSession (line 148) | public RtcpSession(boolean isSender, double bandwidth) {
method setMembers (line 175) | public void setMembers(int members) {
method setSenders (line 184) | public void setSenders(int senders) {
method getReportInterval (line 193) | public double getReportInterval() {
method updateavgrtcpsize (line 236) | public void updateavgrtcpsize(int size) {
method getMySource (line 245) | public RtpSource getMySource() {
method currentTime (line 254) | public long currentTime() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpStatisticsReceiver.java
class RtcpStatisticsReceiver (line 26) | public class RtcpStatisticsReceiver {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtcpStatisticsTransmitter.java
class RtcpStatisticsTransmitter (line 26) | public class RtcpStatisticsTransmitter {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtpPacket.java
class RtpPacket (line 32) | public class RtpPacket extends Packet {
method RtpPacket (line 42) | public RtpPacket() {
method RtpPacket (line 46) | public RtpPacket(Packet packet) {
method assemble (line 52) | public void assemble(int length) throws IOException {
method calcLength (line 71) | public int calcLength() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtpPacketReceiver.java
class RtpPacketReceiver (line 32) | public class RtpPacketReceiver {
method RtpPacketReceiver (line 74) | public RtpPacketReceiver(int port, RtcpSession rtcpSession) throws IOE...
method close (line 87) | public void close() {
method readRtpPacket (line 107) | public RtpPacket readRtpPacket() {
method setRecvBufSize (line 147) | public void setRecvBufSize(int size) {
method parseRtpPacket (line 157) | private RtpPacket parseRtpPacket(byte[] data) {
method getRtpReceptionStats (line 231) | public RtpStatisticsReceiver getRtpReceptionStats() {
method getConnection (line 240) | public DatagramConnection getConnection() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtpPacketTransmitter.java
class RtpPacketTransmitter (line 36) | public class RtpPacketTransmitter {
method RtpPacketTransmitter (line 75) | public RtpPacketTransmitter(RtcpSession rtcpSession) {
method RtpPacketTransmitter (line 91) | public RtpPacketTransmitter(String address, int port, RtcpSession rtcp...
method RtpPacketTransmitter (line 111) | public RtpPacketTransmitter(String address, int port, RtcpSession rtcp...
method close (line 134) | public void close() throws IOException {
method sendRtpPacket (line 150) | public void sendRtpPacket(Buffer buffer) throws IOException {
method buildRtpPacket (line 171) | private RtpPacket buildRtpPacket(Buffer buffer) {
method transmit (line 203) | private void transmit(Packet packet) {
method getStatistics (line 247) | public RtpStatisticsTransmitter getStatistics() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtpSource.java
class RtpSource (line 28) | public class RtpSource {
method RtpSource (line 139) | RtpSource(int sourceSSRC) {
method getExtendedMax (line 165) | public long getExtendedMax() {
method updateSeq (line 177) | public void updateSeq(long seq) {
method updateStatistics (line 198) | public int updateStatistics() {
method currentTime (line 239) | private static long currentTime() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtpStatisticsReceiver.java
class RtpStatisticsReceiver (line 26) | public class RtpStatisticsReceiver {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/core/RtpStatisticsTransmitter.java
class RtpStatisticsTransmitter (line 26) | public class RtpStatisticsTransmitter {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/event/RtcpApplicationEvent.java
class RtcpApplicationEvent (line 28) | public class RtcpApplicationEvent extends RtcpEvent {
method RtcpApplicationEvent (line 35) | public RtcpApplicationEvent(RtcpAppPacket packet) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/event/RtcpByeEvent.java
class RtcpByeEvent (line 28) | public class RtcpByeEvent extends RtcpEvent {
method RtcpByeEvent (line 35) | public RtcpByeEvent(RtcpByePacket packet) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/event/RtcpEvent.java
class RtcpEvent (line 28) | public abstract class RtcpEvent {
method RtcpEvent (line 39) | public RtcpEvent(RtcpPacket packet) {
method getPacket (line 48) | public RtcpPacket getPacket() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/event/RtcpEventListener.java
type RtcpEventListener (line 26) | public interface RtcpEventListener {
method receiveRtcpEvent (line 32) | void receiveRtcpEvent(RtcpEvent event);
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/event/RtcpReceiverReportEvent.java
class RtcpReceiverReportEvent (line 28) | public class RtcpReceiverReportEvent extends RtcpEvent {
method RtcpReceiverReportEvent (line 35) | public RtcpReceiverReportEvent(RtcpReceiverReportPacket packet) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/event/RtcpSdesEvent.java
class RtcpSdesEvent (line 28) | public class RtcpSdesEvent extends RtcpEvent {
method RtcpSdesEvent (line 35) | public RtcpSdesEvent(RtcpSdesPacket packet) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/event/RtcpSenderReportEvent.java
class RtcpSenderReportEvent (line 28) | public class RtcpSenderReportEvent extends RtcpEvent {
method RtcpSenderReportEvent (line 35) | public RtcpSenderReportEvent(RtcpSenderReportPacket packet) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/format/DummyFormat.java
class DummyFormat (line 26) | public class DummyFormat extends Format {
method DummyFormat (line 41) | public DummyFormat() {
method getDataChunkSize (line 50) | public int getDataChunkSize() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/format/Format.java
class Format (line 26) | public abstract class Format {
method Format (line 48) | public Format(String codec, int payload) {
method getCodec (line 58) | public String getCodec() {
method getPayload (line 67) | public int getPayload() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/format/audio/AudioFormat.java
class AudioFormat (line 26) | public class AudioFormat extends Format {
method AudioFormat (line 33) | public AudioFormat(String codec, int payload) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/format/audio/PcmuAudioFormat.java
class PcmuAudioFormat (line 26) | public class PcmuAudioFormat extends AudioFormat {
method PcmuAudioFormat (line 41) | public PcmuAudioFormat() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/format/video/H263VideoFormat.java
class H263VideoFormat (line 26) | public class H263VideoFormat extends VideoFormat {
method H263VideoFormat (line 41) | public H263VideoFormat() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/format/video/H264VideoFormat.java
class H264VideoFormat (line 26) | public class H264VideoFormat extends VideoFormat {
method H264VideoFormat (line 41) | public H264VideoFormat() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/format/video/VideoFormat.java
class VideoFormat (line 26) | public class VideoFormat extends Format {
method VideoFormat (line 33) | public VideoFormat(String codec, int payload) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/media/MediaException.java
class MediaException (line 26) | public class MediaException extends java.lang.Exception {
method MediaException (line 34) | public MediaException(String error) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/media/MediaInput.java
type MediaInput (line 26) | public interface MediaInput {
method open (line 32) | public void open() throws MediaException;
method close (line 37) | public void close();
method readSample (line 45) | public MediaSample readSample() throws MediaException;
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/media/MediaOutput.java
type MediaOutput (line 27) | public interface MediaOutput {
method open (line 33) | public void open() throws MediaException;
method close (line 38) | public void close();
method writeSample (line 46) | public void writeSample(MediaSample sample) throws MediaException;
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/media/MediaSample.java
class MediaSample (line 26) | public class MediaSample {
method MediaSample (line 44) | public MediaSample(byte[] data, long time) {
method getData (line 54) | public byte[] getData() {
method getLength (line 63) | public int getLength() {
method getTimeStamp (line 76) | public long getTimeStamp() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/stream/DummyPacketSourceStream.java
class DummyPacketSourceStream (line 33) | public class DummyPacketSourceStream extends Thread implements Processor...
method DummyPacketSourceStream (line 72) | public DummyPacketSourceStream() {
method open (line 80) | public void open() throws Exception {
method close (line 90) | public void close() {
method getFormat (line 107) | public Format getFormat() {
method run (line 114) | public void run() {
method read (line 145) | public Buffer read() throws Exception {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/stream/MediaCaptureStream.java
class MediaCaptureStream (line 32) | public class MediaCaptureStream implements ProcessorInputStream {
method MediaCaptureStream (line 64) | public MediaCaptureStream(Format format, MediaInput player) {
method open (line 75) | public void open() throws Exception {
method close (line 92) | public void close() {
method getFormat (line 104) | public Format getFormat() {
method read (line 114) | public Buffer read() throws Exception {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/stream/MediaRendererStream.java
class MediaRendererStream (line 31) | public class MediaRendererStream implements ProcessorOutputStream {
method MediaRendererStream (line 47) | public MediaRendererStream(MediaOutput renderer) {
method open (line 56) | public void open() throws Exception {
method close (line 73) | public void close() {
method write (line 86) | public void write(Buffer buffer) throws Exception {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/stream/ProcessorInputStream.java
type ProcessorInputStream (line 26) | public interface ProcessorInputStream {
method open (line 33) | public void open() throws Exception;
method close (line 38) | public void close();
method read (line 46) | public Buffer read() throws Exception;
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/stream/ProcessorOutputStream.java
type ProcessorOutputStream (line 26) | public interface ProcessorOutputStream {
method open (line 32) | public void open() throws Exception;
method close (line 37) | public void close();
method write (line 45) | public void write(Buffer buffer) throws Exception;
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/stream/RtpInputStream.java
class RtpInputStream (line 36) | public class RtpInputStream implements ProcessorInputStream {
method RtpInputStream (line 78) | public RtpInputStream(int localPort, Format inputFormat) {
method open (line 90) | public void open() throws Exception {
method close (line 102) | public void close() {
method getRtpReceiver (line 125) | public RtpPacketReceiver getRtpReceiver() {
method getRtcpReceiver (line 134) | public RtcpPacketReceiver getRtcpReceiver() {
method read (line 144) | public Buffer read() throws Exception {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/util/Buffer.java
class Buffer (line 28) | public class Buffer {
method getFormat (line 118) | public Format getFormat() {
method setFormat (line 127) | public void setFormat(Format format) {
method getFlags (line 136) | public int getFlags() {
method setFlags (line 145) | public void setFlags(int flags) {
method isEOM (line 154) | public boolean isEOM() {
method setEOM (line 163) | public void setEOM(boolean eom) {
method isRTPMarkerSet (line 175) | public boolean isRTPMarkerSet() {
method setRTPMarker (line 184) | public void setRTPMarker(boolean marker) {
method isDiscard (line 196) | public boolean isDiscard() {
method setDiscard (line 205) | public void setDiscard(boolean discard) {
method getData (line 217) | public Object getData() {
method setData (line 226) | public void setData(Object data) {
method getLength (line 235) | public int getLength() {
method setLength (line 244) | public void setLength(int length) {
method getOffset (line 253) | public int getOffset() {
method setOffset (line 262) | public void setOffset(int offset) {
method getTimeStamp (line 271) | public long getTimeStamp() {
method setTimeStamp (line 280) | public void setTimeStamp(long timeStamp) {
method getDuration (line 289) | public long getDuration() {
method setDuration (line 298) | public void setDuration(long duration) {
method setSequenceNumber (line 307) | public void setSequenceNumber(long number) {
method getSequenceNumber (line 316) | public long getSequenceNumber() {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/util/Packet.java
class Packet (line 26) | public class Packet {
method Packet (line 50) | public Packet() {
method Packet (line 58) | public Packet(Packet packet) {
FILE: RtspCamera/src/com/orangelabs/rcs/core/ims/protocol/rtp/util/SystemTimeBase.java
class SystemTimeBase (line 24) | public class SystemTimeBase {
method getTime (line 36) | public long getTime() {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/AndroidFactory.java
class AndroidFactory (line 32) | public class AndroidFactory {
method getApplicationContext (line 43) | public static Context getApplicationContext() {
method setApplicationContext (line 52) | public static void setApplicationContext(Context context) {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/FactoryException.java
class FactoryException (line 26) | public class FactoryException extends java.lang.Exception {
method FactoryException (line 34) | public FactoryException(String error) {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/file/FileDescription.java
class FileDescription (line 26) | public class FileDescription {
method FileDescription (line 45) | public FileDescription(String name, long size) {
method FileDescription (line 53) | public FileDescription(String name, long size, boolean directory) {
method getSize (line 64) | public long getSize() {
method getName (line 73) | public String getName() {
method isDirectory (line 82) | public boolean isDirectory() {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/file/FileFactory.java
class FileFactory (line 33) | public abstract class FileFactory {
method loadFactory (line 45) | public static void loadFactory(String classname) throws FactoryExcepti...
method getFactory (line 62) | public static FileFactory getFactory() {
method openFileInputStream (line 73) | public abstract InputStream openFileInputStream(String url) throws IOE...
method openFileOutputStream (line 82) | public abstract OutputStream openFileOutputStream(String url) throws I...
method getFileDescription (line 91) | public abstract FileDescription getFileDescription(String url) throws ...
method getPhotoRootDirectory (line 98) | public abstract String getPhotoRootDirectory();
method getVideoRootDirectory (line 105) | public abstract String getVideoRootDirectory();
method getFileRootDirectory (line 112) | public abstract String getFileRootDirectory();
method updateMediaStorage (line 119) | public abstract void updateMediaStorage(String url);
method fileExists (line 127) | public abstract boolean fileExists(String url);
method createDirectory (line 135) | public static boolean createDirectory(String path) {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/logger/AndroidAppender.java
class AndroidAppender (line 31) | public class AndroidAppender extends Appender {
method AndroidAppender (line 35) | public AndroidAppender() {
method printTrace (line 46) | public synchronized void printTrace(String classname, int level, Strin...
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/AndroidDatagramConnection.java
class AndroidDatagramConnection (line 32) | public class AndroidDatagramConnection implements DatagramConnection {
method AndroidDatagramConnection (line 41) | public AndroidDatagramConnection() {
method open (line 49) | public void open() throws IOException {
method open (line 59) | public void open(int port) throws IOException {
method close (line 68) | public void close() throws IOException {
method receive (line 82) | public byte[] receive(int bufferSize) throws IOException {
method receive (line 104) | public byte[] receive() throws IOException {
method send (line 116) | public void send(String remoteAddr, int remotePort, byte[] data) throw...
method getLocalAddress (line 136) | public String getLocalAddress() throws IOException {
method getLocalPort (line 150) | public int getLocalPort() throws IOException {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/AndroidHttpConnection.java
class AndroidHttpConnection (line 33) | public class AndroidHttpConnection implements HttpConnection {
method open (line 45) | public void open(String url) throws IOException {
method close (line 56) | public void close() throws IOException {
method get (line 68) | public ByteArrayOutputStream get() throws IOException {
method post (line 82) | public ByteArrayOutputStream post() throws IOException {
method sendHttpRequest (line 97) | private ByteArrayOutputStream sendHttpRequest(String method) throws IO...
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/AndroidNetworkFactory.java
class AndroidNetworkFactory (line 30) | public class AndroidNetworkFactory extends NetworkFactory {
method getLocalIpAddress (line 37) | public String getLocalIpAddress() {
method createDatagramConnection (line 59) | public DatagramConnection createDatagramConnection() {
method createSocketClientConnection (line 68) | public SocketConnection createSocketClientConnection() {
method createSocketServerConnection (line 77) | public SocketServerConnection createSocketServerConnection() {
method createHttpConnection (line 86) | public HttpConnection createHttpConnection() {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/AndroidSocketConnection.java
class AndroidSocketConnection (line 32) | public class AndroidSocketConnection implements SocketConnection {
method AndroidSocketConnection (line 41) | public AndroidSocketConnection() {
method AndroidSocketConnection (line 49) | public AndroidSocketConnection(Socket socket) {
method open (line 60) | public void open(String remoteAddr, int remotePort) throws IOException {
method close (line 69) | public void close() throws IOException {
method getInputStream (line 82) | public InputStream getInputStream() throws IOException {
method getOutputStream (line 96) | public OutputStream getOutputStream() throws IOException {
method getRemoteAddress (line 110) | public String getRemoteAddress() throws IOException {
method getRemotePort (line 124) | public int getRemotePort() throws IOException {
method getLocalAddress (line 138) | public String getLocalAddress() throws IOException {
method getLocalPort (line 152) | public int getLocalPort() throws IOException {
method getSoTimeout (line 167) | public int getSoTimeout() throws IOException {
method setSoTimeout (line 182) | public void setSoTimeout(int timeout) throws IOException {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/AndroidSocketServerConnection.java
class AndroidSocketServerConnection (line 33) | public class AndroidSocketServerConnection implements SocketServerConnec...
method AndroidSocketServerConnection (line 47) | public AndroidSocketServerConnection() {
method open (line 56) | public void open(int port) throws IOException {
method close (line 65) | public void close() throws IOException {
method acceptConnection (line 78) | public SocketConnection acceptConnection() throws IOException {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/DatagramConnection.java
type DatagramConnection (line 28) | public interface DatagramConnection {
method open (line 39) | public void open() throws IOException;
method open (line 47) | public void open(int port) throws IOException;
method close (line 54) | public void close() throws IOException;
method send (line 64) | public void send(String remoteAddr, int remotePort, byte[] data) throw...
method receive (line 72) | public byte[] receive() throws IOException;
method receive (line 81) | public byte[] receive(int bufferSize) throws IOException;
method getLocalAddress (line 89) | public String getLocalAddress() throws IOException;
method getLocalPort (line 97) | public int getLocalPort() throws IOException;
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/HttpConnection.java
type HttpConnection (line 29) | public interface HttpConnection {
method open (line 46) | public void open(String url) throws IOException;
method close (line 53) | public void close() throws IOException;
method get (line 61) | public ByteArrayOutputStream get() throws IOException;
method post (line 69) | public ByteArrayOutputStream post() throws IOException;
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/NetworkFactory.java
class NetworkFactory (line 28) | public abstract class NetworkFactory {
method loadFactory (line 40) | public static void loadFactory(String classname) throws FactoryExcepti...
method getFactory (line 57) | public static NetworkFactory getFactory() {
method getLocalIpAddress (line 66) | public abstract String getLocalIpAddress();
method createDatagramConnection (line 73) | public abstract DatagramConnection createDatagramConnection();
method createSocketClientConnection (line 80) | public abstract SocketConnection createSocketClientConnection();
method createSocketServerConnection (line 87) | public abstract SocketServerConnection createSocketServerConnection();
method createHttpConnection (line 94) | public abstract HttpConnection createHttpConnection();
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/SocketConnection.java
type SocketConnection (line 30) | public interface SocketConnection {
method open (line 38) | public void open(String remoteAddr, int remotePort) throws IOException;
method close (line 45) | public void close() throws IOException;
method getInputStream (line 53) | public InputStream getInputStream() throws IOException;
method getOutputStream (line 61) | public OutputStream getOutputStream() throws IOException;
method getRemoteAddress (line 69) | public String getRemoteAddress() throws IOException;
method getRemotePort (line 77) | public int getRemotePort() throws IOException;
method getLocalAddress (line 85) | public String getLocalAddress() throws IOException;
method getLocalPort (line 93) | public int getLocalPort() throws IOException;
method getSoTimeout (line 102) | public int getSoTimeout() throws IOException;
method setSoTimeout (line 111) | public void setSoTimeout(int timeout) throws IOException;
FILE: RtspCamera/src/com/orangelabs/rcs/platform/network/SocketServerConnection.java
type SocketServerConnection (line 28) | public interface SocketServerConnection {
method open (line 35) | public void open(int port) throws IOException;
method close (line 42) | public void close() throws IOException;
method acceptConnection (line 50) | public SocketConnection acceptConnection() throws IOException;
FILE: RtspCamera/src/com/orangelabs/rcs/platform/registry/AndroidRegistryFactory.java
class AndroidRegistryFactory (line 32) | public class AndroidRegistryFactory extends RegistryFactory {
method AndroidRegistryFactory (line 48) | public AndroidRegistryFactory() throws CoreException {
method readString (line 65) | public String readString(String key, String defaultValue) {
method writeString (line 75) | public void writeString(String key, String value) {
method readInteger (line 88) | public int readInteger(String key, int defaultValue) {
method writeInteger (line 98) | public void writeInteger(String key, int value) {
method readLong (line 111) | public long readLong(String key, long defaultValue) {
method writeLong (line 121) | public void writeLong(String key, long value) {
method readBoolean (line 134) | public boolean readBoolean(String key, boolean defaultValue) {
method writeBoolean (line 144) | public void writeBoolean(String key, boolean value) {
method removeParameter (line 155) | public void removeParameter(String key) {
FILE: RtspCamera/src/com/orangelabs/rcs/platform/registry/RegistryFactory.java
class RegistryFactory (line 28) | public abstract class RegistryFactory {
method loadFactory (line 40) | public static void loadFactory(String classname) throws FactoryExcepti...
method getFactory (line 57) | public static RegistryFactory getFactory() {
method readString (line 68) | public abstract String readString(String key, String defaultValue);
method writeString (line 76) | public abstract void writeString(String key, String value);
method readInteger (line 85) | public abstract int readInteger(String key, int defaultValue);
method writeInteger (line 93) | public abstract void writeInteger(String key, int value);
method readLong (line 102) | public abstract long readLong(String key, long defaultValue);
method writeLong (line 110) | public abstract void writeLong(String key, long value);
method readBoolean (line 119) | public abstract boolean readBoolean(String key, boolean defaultValue);
method writeBoolean (line 127) | public abstract void writeBoolean(String key, boolean value);
method removeParameter (line 134) | public abstract void removeParameter(String key);
FILE: RtspCamera/src/com/orangelabs/rcs/provider/settings/RcsSettings.java
class RcsSettings (line 34) | public class RcsSettings {
method createInstance (line 55) | public static synchronized void createInstance(Context ctx) {
method getInstance (line 66) | public static RcsSettings getInstance() {
method RcsSettings (line 75) | private RcsSettings(Context ctx) {
method readParameter (line 87) | public String readParameter(String key) {
method writeParameter (line 105) | public void writeParameter(String key, String value) {
method isServiceActivated (line 117) | public boolean isServiceActivated() {
method setServiceActivationState (line 130) | public void setServiceActivationState(boolean state) {
method isRoamingAuthorized (line 141) | public boolean isRoamingAuthorized() {
method setRoamingAuthorizationState (line 154) | public void setRoamingAuthorizationState(boolean state) {
method getPresenceInvitationRingtone (line 165) | public String getPresenceInvitationRingtone() {
method setPresenceInvitationRingtone (line 178) | public void setPresenceInvitationRingtone(String uri) {
method isPhoneVibrateForPresenceInvitation (line 189) | public boolean isPhoneVibrateForPresenceInvitation() {
method setPhoneVibrateForPresenceInvitation (line 202) | public void setPhoneVibrateForPresenceInvitation(boolean vibrate) {
method getCShInvitationRingtone (line 213) | public String getCShInvitationRingtone() {
method setCShInvitationRingtone (line 226) | public void setCShInvitationRingtone(String uri) {
method isPhoneVibrateForCShInvitation (line 237) | public boolean isPhoneVibrateForCShInvitation() {
method setPhoneVibrateForCShInvitation (line 250) | public void setPhoneVibrateForCShInvitation(boolean vibrate) {
method isPhoneBeepIfCShAvailable (line 261) | public boolean isPhoneBeepIfCShAvailable() {
method setPhoneBeepIfCShAvailable (line 274) | public void setPhoneBeepIfCShAvailable(boolean beep) {
method getCShVideoFormat (line 285) | public String getCShVideoFormat() {
method setCShVideoFormat (line 298) | public void setCShVideoFormat(String fmt) {
method getCShVideoSize (line 309) | public String getCShVideoSize() {
method setCShVideoSize (line 322) | public void setCShVideoSize(String size) {
method getFileTransferInvitationRingtone (line 333) | public String getFileTransferInvitationRingtone() {
method setFileTransferInvitationRingtone (line 346) | public void setFileTransferInvitationRingtone(String uri) {
method isPhoneVibrateForFileTransferInvitation (line 357) | public boolean isPhoneVibrateForFileTransferInvitation() {
method setPhoneVibrateForFileTransferInvitation (line 370) | public void setPhoneVibrateForFileTransferInvitation(boolean vibrate) {
method getChatInvitationRingtone (line 381) | public String getChatInvitationRingtone() {
method setChatInvitationRingtone (line 394) | public void setChatInvitationRingtone(String uri) {
method isPhoneVibrateForChatInvitation (line 405) | public boolean isPhoneVibrateForChatInvitation() {
method setPhoneVibrateForChatInvitation (line 418) | public void setPhoneVibrateForChatInvitation(boolean vibrate) {
method isAutoAcceptModeForChatInvitation (line 429) | public boolean isAutoAcceptModeForChatInvitation(){
method setAutoAcceptModeForChatInvitation (line 442) | public void setAutoAcceptModeForChatInvitation(boolean auto) {
method getPredefinedFreetext1 (line 453) | public String getPredefinedFreetext1() {
method setPredefinedFreetext1 (line 466) | public void setPredefinedFreetext1(String txt) {
method getPredefinedFreetext2 (line 477) | public String getPredefinedFreetext2() {
method setPredefinedFreetext2 (line 490) | public void setPredefinedFreetext2(String txt) {
method getPredefinedFreetext3 (line 501) | public String getPredefinedFreetext3() {
method setPredefinedFreetext3 (line 514) | public void setPredefinedFreetext3(String txt) {
method getPredefinedFreetext4 (line 525) | public String getPredefinedFreetext4() {
method setPredefinedFreetext4 (line 538) | public void setPredefinedFreetext4(String txt) {
method getUserProfileImsUserName (line 549) | public String getUserProfileImsUserName() {
method setUserProfileImsUserName (line 562) | public void setUserProfileImsUserName(String value) {
method getUserProfileImsDisplayName (line 573) | public String getUserProfileImsDisplayName() {
method setUserProfileImsDisplayName (line 586) | public void setUserProfileImsDisplayName(String value) {
method getUserProfileImsPrivateId (line 597) | public String getUserProfileImsPrivateId() {
method setUserProfileImsPrivateId (line 610) | public void setUserProfileImsPrivateId(String uri) {
method getUserProfileImsPassword (line 621) | public String getUserProfileImsPassword() {
method setUserProfileImsPassword (line 634) | public void setUserProfileImsPassword(String pwd) {
method getUserProfileImsDomain (line 645) | public String getUserProfileImsDomain() {
method setUserProfileImsDomain (line 658) | public void setUserProfileImsDomain(String domain) {
method getImsProxyAddrForMobile (line 669) | public String getImsProxyAddrForMobile() {
method setImsProxyAddrForMobile (line 682) | public void setImsProxyAddrForMobile(String addr) {
method getImsProxyPortForMobile (line 693) | public int getImsProxyPortForMobile() {
method setImsProxyPortForMobile (line 708) | public void setImsProxyPortForMobile(int port) {
method getImsProxyAddrForWifi (line 719) | public String getImsProxyAddrForWifi() {
method setImsProxyAddrForWifi (line 732) | public void setImsProxyAddrForWifi(String addr) {
method getImsProxyPortForWifi (line 743) | public int getImsProxyPortForWifi() {
method setImsProxyPortForWifi (line 758) | public void setImsProxyPortForWifi(int port) {
method getXdmServer (line 769) | public String getXdmServer() {
method setXdmServer (line 782) | public void setXdmServer(String addr) {
method getXdmLogin (line 793) | public String getXdmLogin() {
method setXdmLogin (line 806) | public void setXdmLogin(String value) {
method getXdmPassword (line 817) | public String getXdmPassword() {
method setXdmPassword (line 830) | public void setXdmPassword(String value) {
method getImConferenceUri (line 841) | public String getImConferenceUri() {
method setImConferenceUri (line 854) | public void setImConferenceUri(String uri) {
method getEndUserConfirmationRequestUri (line 865) | public String getEndUserConfirmationRequestUri() {
method setEndUserConfirmationRequestUri (line 878) | public void setEndUserConfirmationRequestUri(String uri) {
method getCountryCode (line 889) | public String getCountryCode() {
method setCountryCode (line 902) | public void setCountryCode(String code) {
method getCountryAreaCode (line 913) | public String getCountryAreaCode() {
method setCountryAreaCode (line 926) | public void setCountryAreaCode(String code) {
method getMyCapabilities (line 937) | public Capabilities getMyCapabilities(){
method getMaxPhotoIconSize (line 967) | public int getMaxPhotoIconSize() {
method getMaxFreetextLength (line 982) | public int getMaxFreetextLength() {
method getMaxChatParticipants (line 997) | public int getMaxChatParticipants() {
method getMaxChatMessageLength (line 1012) | public int getMaxChatMessageLength() {
method getChatIdleDuration (line 1027) | public int getChatIdleDuration() {
method getMaxFileTransferSize (line 1042) | public int getMaxFileTransferSize() {
method getWarningMaxFileTransferSize (line 1057) | public int getWarningMaxFileTransferSize() {
method getMaxImageSharingSize (line 1072) | public int getMaxImageSharingSize() {
method getMaxVideoShareDuration (line 1087) | public int getMaxVideoShareDuration() {
method getMaxChatSessions (line 1102) | public int getMaxChatSessions() {
method getMaxFileTransferSessions (line 1117) | public int getMaxFileTransferSessions() {
method isSmsFallbackServiceActivated (line 1132) | public boolean isSmsFallbackServiceActivated() {
method isStoreForwardWarningActivated (line 1145) | public boolean isStoreForwardWarningActivated() {
method getImSessionStartMode (line 1159) | public int getImSessionStartMode() {
method getMaxChatLogEntriesPerContact (line 1174) | public int getMaxChatLogEntriesPerContact() {
method getMaxRichcallLogEntriesPerContact (line 1189) | public int getMaxRichcallLogEntriesPerContact() {
method getImsServicePollingPeriod (line 1204) | public int getImsServicePollingPeriod(){
method getSipListeningPort (line 1219) | public int getSipListeningPort() {
method getSipDefaultProtocolForMobile (line 1234) | public String getSipDefaultProtocolForMobile() {
method getSipDefaultProtocolForWifi (line 1247) | public String getSipDefaultProtocolForWifi() {
method getTlsCertificateRoot (line 1260) | public String getTlsCertificateRoot() {
method getTlsCertificateIntermediate (line 1273) | public String getTlsCertificateIntermediate() {
method getSipTransactionTimeout (line 1286) | public int getSipTransactionTimeout() {
method getDefaultMsrpPort (line 1301) | public int getDefaultMsrpPort() {
method getDefaultRtpPort (line 1316) | public int getDefaultRtpPort() {
method getMsrpTransactionTimeout (line 1331) | public int getMsrpTransactionTimeout() {
method getRegisterExpirePeriod (line 1346) | public int getRegisterExpirePeriod() {
method getRegisterRetryBaseTime (line 1361) | public int getRegisterRetryBaseTime() {
method getRegisterRetryMaxTime (line 1376) | public int getRegisterRetryMaxTime() {
method getPublishExpirePeriod (line 1391) | public int getPublishExpirePeriod() {
method getRevokeTimeout (line 1406) | public int getRevokeTimeout() {
method getImsAuhtenticationProcedureForMobile (line 1421) | public String getImsAuhtenticationProcedureForMobile() {
method getImsAuhtenticationProcedureForWifi (line 1434) | public String getImsAuhtenticationProcedureForWifi() {
method isTelUriFormatUsed (line 1447) | public boolean isTelUriFormatUsed() {
method getRingingPeriod (line 1460) | public int getRingingPeriod() {
method getSubscribeExpirePeriod (line 1475) | public int getSubscribeExpirePeriod() {
method getIsComposingTimeout (line 1490) | public int getIsComposingTimeout() {
method getSessionRefreshExpirePeriod (line 1505) | public int getSessionRefreshExpirePeriod() {
method isPermanentStateModeActivated (line 1520) | public boolean isPermanentStateModeActivated() {
method isTraceActivated (line 1533) | public boolean isTraceActivated() {
method getTraceLevel (line 1546) | public String getTraceLevel() {
method isSipTraceActivated (line 1559) | public boolean isSipTraceActivated() {
method getSipTraceFile (line 1572) | public String getSipTraceFile() {
method isMediaTraceActivated (line 1587) | public boolean isMediaTraceActivated() {
method getCapabilityRefreshTimeout (line 1600) | public int getCapabilityRefreshTimeout() {
method getCapabilityExpiryTimeout (line 1615) | public int getCapabilityExpiryTimeout() {
method getCapabilityPollingPeriod (line 1630) | public int getCapabilityPollingPeriod() {
method isCsVideoSupported (line 1645) | public boolean isCsVideoSupported() {
method isFileTransferSupported (line 1658) | public boolean isFileTransferSupported() {
method isImSessionSupported (line 1671) | public boolean isImSessionSupported() {
method isImageSharingSupported (line 1684) | public boolean isImageSharingSupported() {
method isVideoSharingSupported (line 1697) | public boolean isVideoSharingSupported() {
method isPresenceDiscoverySupported (line 1710) | public boolean isPresenceDiscoverySupported() {
method isSocialPresenceSupported (line 1723) | public boolean isSocialPresenceSupported() {
method getSupportedRcsExtensions (line 1736) | public String getSupportedRcsExtensions() {
method setSupportedRcsExtensions (line 1749) | public void setSupportedRcsExtensions(String extensions) {
method isImAlwaysOn (line 1760) | public boolean isImAlwaysOn() {
method isImReportsActivated (line 1773) | public boolean isImReportsActivated() {
method getNetworkAccess (line 1786) | public int getNetworkAccess() {
method getSipTimerT1 (line 1801) | public int getSipTimerT1() {
method getSipTimerT2 (line 1816) | public int getSipTimerT2() {
method getSipTimerT4 (line 1831) | public int getSipTimerT4() {
method isSipKeepAliveEnabled (line 1846) | public boolean isSipKeepAliveEnabled() {
method getSipKeepAlivePeriod (line 1859) | public int getSipKeepAlivePeriod() {
method getNetworkApn (line 1874) | public String getNetworkApn() {
method getNetworkOperator (line 1887) | public String getNetworkOperator() {
method isGruuSupported (line 1900) | public boolean isGruuSupported() {
method isCpuAlwaysOn (line 1913) | public boolean isCpuAlwaysOn() {
method getAutoConfigMode (line 1926) | public int getAutoConfigMode() {
method removeUserProfile (line 1939) | public void removeUserProfile() {
FILE: RtspCamera/src/com/orangelabs/rcs/provider/settings/RcsSettingsData.java
class RcsSettingsData (line 29) | public class RcsSettingsData {
FILE: RtspCamera/src/com/orangelabs/rcs/service/api/client/capability/Capabilities.java
class Capabilities (line 31) | public class Capabilities implements Parcelable {
method Capabilities (line 80) | public Capabilities() {
method Capabilities (line 88) | public Capabilities(Parcel source) {
method describeContents (line 106) | public int describeContents() {
method writeToParcel (line 116) | public void writeToParcel(Parcel dest, int flags) {
method createFromParcel (line 135) | public Capabilities createFromParcel(Parcel source) {
method newArray (line 139) | public Capabilities[] newArray(int size) {
method isImageSharingSupported (line 149) | public boolean isImageSharingSupported() {
method setImageSharingSupport (line 158) | public void setImageSharingSupport(boolean supported) {
method isVideoSharingSupported (line 167) | public boolean isVideoSharingSupported() {
method setVideoSharingSupport (line 176) | public void setVideoSharingSupport(boolean supported) {
method isImSessionSupported (line 185) | public boolean isImSessionSupported() {
method setImSessionSupport (line 194) | public void setImSessionSupport(boolean supported) {
method isFileTransferSupported (line 203) | public boolean isFileTransferSupported() {
method setFileTransferSupport (line 212) | public void setFileTransferSupport(boolean supported) {
method isCsVideoSupported (line 221) | public boolean isCsVideoSupported() {
method setCsVideoSupport (line 230) | public void setCsVideoSupport(boolean supported) {
method isPresenceDiscoverySupported (line 239) | public boolean isPresenceDiscoverySupported() {
method setPresenceDiscoverySupport (line 248) | public void setPresenceDiscoverySupport(boolean supported) {
method isSocialPresenceSupported (line 257) | public boolean isSocialPresenceSupported() {
method setSocialPresenceSupport (line 266) | public void setSocialPresenceSupport(boolean supported) {
method addSupportedExtension (line 275) | public void addSupportedExtension(String tag) {
method getSupportedExtensions (line 284) | public ArrayList<String> getSupportedExtensions() {
method getTimestamp (line 293) | public long getTimestamp() {
method setTimestamp (line 302) | public void setTimestamp(long timestamp) {
method toString (line 311) | public String toString() {
FILE: RtspCamera/src/com/orangelabs/rcs/service/api/client/media/MediaCodec.java
class MediaCodec (line 35) | public class MediaCodec implements Parcelable {
method MediaCodec (line 51) | public MediaCodec(String codecName) {
method MediaCodec (line 60) | public MediaCodec(Parcel source) {
method describeContents (line 79) | public int describeContents() {
method writeToParcel (line 89) | public void writeToParcel(Parcel dest, int flags) {
method createFromParcel (line 105) | public MediaCodec createFromParcel(Parcel source) {
method newArray (line 109) | public MediaCodec[] newArray(int size) {
method getCodecName (line 119) | public String getCodecName() {
method setCodecName (line 128) | public void setCodecName(String codecName) {
method getStringParam (line 138) | public String getStringParam(String key) {
method getIntParam (line 153) | public int getIntParam(String key, int defaultValue) {
method setParam (line 168) | public void setParam(String key, String value) {
FILE: RtspCamera/src/com/orangelabs/rcs/service/api/client/media/video/VideoCodec.java
class VideoCodec (line 28) | public class VideoCodec {
method VideoCodec (line 81) | public VideoCodec(String codecName, int payload, int clockRate, String...
method VideoCodec (line 98) | public VideoCodec(MediaCodec mediaCodec) {
method getMediaCodec (line 107) | public MediaCodec getMediaCodec() {
method getCodecName (line 116) | public String getCodecName() {
method getPayload (line 125) | public int getPayload() {
method getClockRate (line 134) | public int getClockRate() {
method getCodecParams (line 143) | public String getCodecParams() {
method getFramerate (line 152) | public int getFramerate() {
method getBitrate (line 161) | public int getBitrate() {
method getWidth (line 170) | public int getWidth() {
method getHeight (line 179) | public int getHeight() {
method compare (line 189) | public boolean compare(VideoCodec codec) {
method checkVideoCodec (line 204) | public static boolean checkVideoCodec(MediaCodec[] supportedCodecs, Vi...
FILE: RtspCamera/src/com/orangelabs/rcs/service/api/client/media/video/VideoSurfaceView.java
class VideoSurfaceView (line 33) | public class VideoSurfaceView extends SurfaceView {
method VideoSurfaceView (line 59) | public VideoSurfaceView(Context context) {
method VideoSurfaceView (line 71) | public VideoSurfaceView(Context context, AttributeSet attrs) {
method VideoSurfaceView (line 84) | public VideoSurfaceView(Context context, AttributeSet attrs, int defSt...
method setAspectRatio (line 96) | public void setAspectRatio(int width, int height) {
method setAspectRatio (line 105) | public void setAspectRatio(float ratio) {
method onMeasure (line 119) | protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
method setImage (line 149) | public void setImage(Bitmap bmp) {
method clearImage (line 169) | public void clearImage() {
method init (line 190) | private void init() {
method surfaceChanged (line 200) | public void surfaceChanged(SurfaceHolder _holder, int format, int w,in...
method surfaceCreated (line 203) | public void surfaceCreated(SurfaceHolder _holder) {
method surfaceDestroyed (line 207) | public void surfaceDestroyed(SurfaceHolder _holder) {
FILE: RtspCamera/src/com/orangelabs/rcs/utils/FifoBuffer.java
class FifoBuffer (line 28) | public class FifoBuffer {
method addObject (line 44) | public synchronized void addObject(Object obj) {
method getObject (line 55) | public synchronized Object getObject() {
method getObject (line 80) | public synchronized Object getObject(int timeout) {
method close (line 101) | public synchronized void close() {
FILE: RtspCamera/src/com/orangelabs/rcs/utils/NetworkRessourceManager.java
class NetworkRessourceManager (line 32) | public class NetworkRessourceManager {
method generateLocalSipPort (line 53) | public static synchronized int generateLocalSipPort() {
method generateLocalRtpPort (line 62) | public static synchronized int generateLocalRtpPort() {
method generateLocalMsrpPort (line 71) | public static synchronized int generateLocalMsrpPort() {
method generateLocalUdpPort (line 81) | private static int generateLocalUdpPort(int portBase) {
method isLocalUdpPortFree (line 103) | private static boolean isLocalUdpPortFree(int port) {
method generateLocalTcpPort (line 122) | private static int generateLocalTcpPort(int portBase) {
method isLocalTcpPortFree (line 143) | private static boolean isLocalTcpPortFree(int port) {
method isValidIpAddress (line 162) | public static boolean isValidIpAddress(String ipAddress) {
method ipToInt (line 178) | public static int ipToInt(String addr) {
FILE: RtspCamera/src/com/orangelabs/rcs/utils/logger/Appender.java
class Appender (line 26) | public abstract class Appender {
method Appender (line 30) | public Appender() {
method printTrace (line 40) | public abstract void printTrace(String classname, int level, String tr...
FILE: RtspCamera/src/com/orangelabs/rcs/utils/logger/Logger.java
class Logger (line 28) | public class Logger {
method Logger (line 91) | private Logger(String classname) {
method isActivated (line 105) | public boolean isActivated() {
method debug (line 114) | public void debug(String trace) {
method info (line 123) | public void info(String trace) {
method warn (line 132) | public void warn(String trace) {
method error (line 141) | public void error(String trace) {
method error (line 151) | public void error(String trace, Throwable e) {
method fatal (line 161) | public void fatal(String trace) {
method fatal (line 171) | public void fatal(String trace, Throwable e) {
method printTrace (line 182) | private void printTrace(String trace, int level) {
method setAppenders (line 195) | public static void setAppenders(Appender[] appenders) {
method getLogger (line 205) | public static synchronized Logger getLogger(String classname) {
method getAppenders (line 214) | public static synchronized Appender[] getAppenders() {
FILE: RtspCamera/src/de/kp/net/rtp/RtpPacket.java
class RtpPacket (line 27) | public class RtpPacket {
method getPacket (line 37) | public byte[] getPacket() {
method getLength (line 42) | public int getLength() {
method getHeaderLength (line 47) | public int getHeaderLength() {
method getPayloadLength (line 55) | public int getPayloadLength() {
method setPayloadLength (line 63) | public void setPayloadLength(int len) {
method getVersion (line 80) | public int getVersion() {
method setVersion (line 88) | public void setVersion(int v) {
method hasPadding (line 94) | public boolean hasPadding() {
method setPadding (line 102) | public void setPadding(boolean p) {
method hasExtension (line 108) | public boolean hasExtension() {
method setExtension (line 116) | public void setExtension(boolean x) {
method getCscrCount (line 122) | public int getCscrCount() {
method hasMarker (line 130) | public boolean hasMarker() {
method setMarker (line 138) | public void setMarker(boolean m) {
method getPayloadType (line 144) | public int getPayloadType() {
method setPayloadType (line 152) | public void setPayloadType(int pt) {
method getSequenceNumber (line 158) | public int getSequenceNumber() {
method setSequenceNumber (line 166) | public void setSequenceNumber(int sn) {
method getTimestamp (line 172) | public long getTimestamp() {
method setTimestamp (line 180) | public void setTimestamp(long timestamp) {
method getSscr (line 186) | public long getSscr() {
method setSscr (line 194) | public void setSscr(long ssrc) {
method getCscrList (line 200) | public long[] getCscrList() {
method setCscrList (line 209) | public void setCscrList(long[] cscr) {
method setPayload (line 223) | public void setPayload(byte[] payload, int len) {
method getPayload (line 233) | public byte[] getPayload() {
method RtpPacket (line 243) | public RtpPacket(byte[] buffer, int packet_length) {
method init (line 252) | public void init(int ptype) {
method init (line 257) | public void init(int ptype, long sscr) {
method init (line 262) | public void init(int ptype, int seqn, long timestamp, long sscr) {
method getLong (line 278) | private static long getLong(byte[] data, int begin, int end) {
method setLong (line 288) | private static void setLong(long n, byte[] data, int begin, int end) {
method getInt (line 296) | private static int getInt(byte[] data, int begin, int end) {
method setInt (line 301) | private static void setInt(int n, byte[] data, int begin, int end) {
method getBit (line 306) | private static boolean getBit(byte b, int bit) {
method setBit (line 311) | private static byte setBit(boolean value, byte b, int bit) {
FILE: RtspCamera/src/de/kp/net/rtp/RtpRandom.java
class RtpRandom (line 30) | public class RtpRandom {
method setSeed (line 51) | public static void setSeed(long seed) {
method nextInt (line 56) | public static int nextInt() {
method nextInt (line 61) | public static int nextInt(int n) {
method nextLong (line 66) | public static long nextLong() {
method nextBoolean (line 71) | public static boolean nextBoolean() {
method nextBytes (line 76) | public static byte[] nextBytes(int len) {
method nextString (line 84) | public static String nextString(int len) {
method nextNumString (line 94) | public static String nextNumString(int len) {
method nextHexString (line 102) | public static String nextHexString(int len) {
FILE: RtspCamera/src/de/kp/net/rtp/RtpSender.java
class RtpSender (line 14) | public class RtpSender {
method RtpSender (line 24) | private RtpSender() {
method getReceiverCount (line 28) | public int getReceiverCount() {
method getInstance (line 32) | public static RtpSender getInstance() {
method addReceiver (line 42) | public void addReceiver(RtpSocket receiver) {
method removeReceiver (line 50) | public void removeReceiver(RtpSocket receiver) {
method send (line 61) | public synchronized void send(RtpPacket rtpPacket) throws IOException {
method send (line 76) | public synchronized void send(byte[] data) throws IOException {
method clear (line 87) | public void clear() {
method stop (line 91) | public void stop() {
FILE: RtspCamera/src/de/kp/net/rtp/RtpSocket.java
class RtpSocket (line 37) | public class RtpSocket {
method RtpSocket (line 58) | public RtpSocket(InetAddress remoteAddress, int remotePort) throws Soc...
method RtpSocket (line 71) | public RtpSocket(DatagramSocket socket, InetAddress remoteAddress, int...
method getSocket (line 84) | public DatagramSocket getSocket() {
method receive (line 89) | public void receive(RtpPacket rtpPacket) throws IOException {
method send (line 103) | public void send(RtpPacket rtpPacket) throws IOException {
method send (line 118) | public void send(byte[] data) throws IOException {
method suspend (line 132) | public void suspend(boolean suspended) {
method close (line 137) | public void close() { // socket.close();
FILE: RtspCamera/src/de/kp/net/rtp/packetizer/AbstractPacketizer.java
class AbstractPacketizer (line 8) | abstract public class AbstractPacketizer extends Thread {
method AbstractPacketizer (line 14) | public AbstractPacketizer() {
method AbstractPacketizer (line 18) | public AbstractPacketizer(Runnable runnable) {
method AbstractPacketizer (line 22) | public AbstractPacketizer(String threadName) {
method AbstractPacketizer (line 26) | public AbstractPacketizer(Runnable runnable, String threadName) {
method AbstractPacketizer (line 30) | public AbstractPacketizer(ThreadGroup group, Runnable runnable) {
method AbstractPacketizer (line 34) | public AbstractPacketizer(ThreadGroup group, String threadName) {
method AbstractPacketizer (line 38) | public AbstractPacketizer(ThreadGroup group, Runnable runnable, String...
method AbstractPacketizer (line 42) | public AbstractPacketizer(ThreadGroup group, Runnable runnable, String...
method startStreaming (line 46) | public void startStreaming() {
method stopStreaming (line 51) | public void stopStreaming() {
FILE: RtspCamera/src/de/kp/net/rtp/packetizer/H263Packetizer.java
class H263Packetizer (line 13) | public class H263Packetizer extends AbstractPacketizer implements Runnab...
method H263Packetizer (line 22) | public H263Packetizer(InputStream fis) throws SocketException {
method run (line 27) | public void run() {
FILE: RtspCamera/src/de/kp/net/rtp/packetizer/H264Fifo.java
class H264Fifo (line 23) | public class H264Fifo {
method H264Fifo (line 28) | public H264Fifo(int length) {
method write (line 33) | public void write(byte[] buffer, int offset, int length) {
method read (line 48) | public int read(byte[] buffer, int offset, int length) {
method available (line 66) | public int available() {
FILE: RtspCamera/src/de/kp/net/rtp/packetizer/H264Packetizer.java
class H264Packetizer (line 14) | public class H264Packetizer extends AbstractPacketizer implements Runnab...
method H264Packetizer (line 31) | public H264Packetizer(InputStream fis) throws SocketException {
method run (line 36) | public void run() {
method skipMDAT (line 198) | private void skipMDAT() throws IOException {
method fillFifo (line 216) | private void fillFifo() {
method printBuffer (line 273) | protected String printBuffer(int start,int end) {
FILE: RtspCamera/src/de/kp/net/rtp/recorder/MediaRtpSender.java
class MediaRtpSender (line 35) | public class MediaRtpSender {
method MediaRtpSender (line 61) | public MediaRtpSender(Format format) {
method prepareSession (line 65) | public void prepareSession(MediaInput player) throws RtpException {
method startSession (line 104) | public void startSession() {
method stopSession (line 118) | public void stopSession() {
FILE: RtspCamera/src/de/kp/net/rtp/recorder/RtspVideoRecorder.java
class RtspVideoRecorder (line 49) | public class RtspVideoRecorder extends IMediaPlayer.Stub implements Came...
method RtspVideoRecorder (line 123) | public RtspVideoRecorder() {
method RtspVideoRecorder (line 131) | public RtspVideoRecorder(VideoCodec codec) {
method RtspVideoRecorder (line 141) | public RtspVideoRecorder(String codec) {
method getLocalRtpPort (line 156) | public int getLocalRtpPort() {
method getVideoStartTime (line 165) | public long getVideoStartTime() {
method isOpened (line 174) | public boolean isOpened() {
method isStarted (line 183) | public boolean isStarted() {
method open (line 193) | public void open(String remoteHost, int remotePort) {
method open (line 198) | public void open() {
method close (line 286) | public void close() {
method start (line 318) | public synchronized void start() {
method stop (line 341) | public void stop() {
method addListener (line 365) | public void addListener(IMediaEventListener listener) {
method removeAllListeners (line 372) | public void removeAllListeners() {
method getSupportedMediaCodecs (line 381) | public MediaCodec[] getSupportedMediaCodecs() {
method getMediaCodec (line 390) | public MediaCodec getMediaCodec() {
method setMediaCodec (line 402) | public void setMediaCodec(MediaCodec mediaCodec) {
method onPreviewFrame (line 429) | public void onPreviewFrame(byte[] data, Camera camera) {
class CameraBuffer (line 437) | private class CameraBuffer {
method setFrame (line 449) | public void setFrame(byte[] frame) {
method getFrame (line 458) | public byte[] getFrame() {
method run (line 475) | public void run() {
class MediaRtpInput (line 538) | private static class MediaRtpInput implements MediaInput {
method MediaRtpInput (line 547) | public MediaRtpInput() {
method addFrame (line 556) | public void addFrame(byte[] data, long timestamp) {
method open (line 565) | public void open() {
method close (line 572) | public void close() {
method readSample (line 585) | public MediaSample readSample() throws MediaException {
FILE: RtspCamera/src/de/kp/net/rtp/stream/RtpOutputStream.java
class RtpOutputStream (line 39) | public class RtpOutputStream implements ProcessorOutputStream {
method RtpOutputStream (line 56) | public RtpOutputStream() {
method open (line 63) | public void open() throws Exception {
method close (line 66) | public void close() {
method write (line 75) | public void write(Buffer buffer) throws IOException {
method buildRtpPacket (line 96) | private RtpPacket buildRtpPacket(Buffer buffer) {
method transmit (line 135) | private void transmit(Packet packet) {
FILE: RtspCamera/src/de/kp/net/rtp/viewer/RtpVideoRenderer.java
class RtpVideoRenderer (line 59) | public class RtpVideoRenderer extends IMediaRenderer.Stub {
method RtpVideoRenderer (line 143) | public RtpVideoRenderer(String uri) throws Exception {
method setVideoSurface (line 209) | public void setVideoSurface(VideoSurfaceView surface) {
method getVideoStartTime (line 218) | public long getVideoStartTime() {
method getLocalRtpPort (line 227) | public int getLocalRtpPort() {
method reservePort (line 236) | private void reservePort(int port) {
method releasePort (line 253) | private void releasePort() {
method isOpened (line 270) | public boolean isOpened() {
method isStarted (line 279) | public boolean isStarted() {
method open (line 286) | public void open() {
method close (line 364) | public void close() {
method closeVideoDecoder (line 383) | public void closeVideoDecoder() {
method start (line 407) | public void start() {
method stop (line 436) | public void stop() {
method addListener (line 465) | public void addListener(IMediaEventListener listener) {
method removeAllListeners (line 472) | public void removeAllListeners() {
method getSupportedMediaCodecs (line 481) | public MediaCodec[] getSupportedMediaCodecs() {
method getMediaCodec (line 490) | public MediaCodec getMediaCodec() {
method setMediaCodec (line 502) | public void setMediaCodec(MediaCodec mediaCodec) {
class MediaRtpOutput (line 520) | private class MediaRtpOutput implements MediaOutput {
method MediaRtpOutput (line 534) | public MediaRtpOutput() {
method open (line 544) | public void open() {
method close (line 550) | public void close() {
method writeSample (line 558) | public void writeSample(MediaSample sample) {
method open (line 582) | @Override
FILE: RtspCamera/src/de/kp/net/rtsp/RtspConstants.java
class RtspConstants (line 10) | public class RtspConstants {
type VideoEncoder (line 45) | public static enum VideoEncoder {
method getLocalIpAddress (line 90) | public static String getLocalIpAddress() {
FILE: RtspCamera/src/de/kp/net/rtsp/client/RtspClient.java
class RtspClient (line 47) | public class RtspClient implements TransportListener {
method RtspClient (line 68) | public RtspClient() {
method getTransport (line 79) | public Transport getTransport() {
method setSession (line 83) | public void setSession(SessionHeader session) {
method getMessageFactory (line 87) | public MessageFactory getMessageFactory() {
method getURI (line 91) | public URI getURI() {
method options (line 95) | public void options(String uri, URI endpoint) {
method play (line 108) | public void play() {
method pause (line 119) | public void pause() {
method record (line 129) | public void record() throws IOException {
method setRequestListener (line 133) | public void setRequestListener(RequestListener listener) {
method getRequestListener (line 137) | public RequestListener getRequestListener() {
method setTransport (line 141) | public void setTransport(Transport transport) {
method describe (line 146) | public void describe(URI uri, String resource) {
method setup (line 162) | public void setup(URI uri, int localPort) {
method setup (line 175) | public void setup(URI uri, int localPort, String resource) {
method teardown (line 193) | public void teardown() {
method dataReceived (line 207) | public void dataReceived(Transport t, byte[] data, int size) throws Th...
method dataSent (line 238) | @Override
method error (line 243) | @Override
method error (line 248) | @Override
method remoteDisconnection (line 254) | @Override
method nextCSeq (line 265) | public int nextCSeq() {
method send (line 269) | public void send(Message message) throws Exception {
method send (line 273) | private void send(Message message, URI endpoint) throws Exception
method getSetup (line 296) | private Request getSetup(String uri, int localPort, RtspHeader... head...
method connected (line 301) | @Override
FILE: RtspCamera/src/de/kp/net/rtsp/client/RtspControl.java
class RtspControl (line 12) | public class RtspControl implements RequestListener {
method RtspControl (line 40) | public RtspControl(String uri) {
method RtspControl (line 76) | public RtspControl(String uri, String resource) {
method play (line 111) | public void play() {
method pause (line 121) | public void pause() {
method stop (line 131) | public void stop() {
method isConnected (line 140) | public boolean isConnected() {
method getState (line 144) | public int getState() {
method getClientPort (line 148) | public int getClientPort() {
method getDescriptor (line 152) | public RtspDescriptor getDescriptor() {
method onError (line 156) | @Override
method onDescriptor (line 171) | public void onDescriptor(RtspClient client, String descriptor) {
method onFailure (line 175) | public void onFailure(RtspClient client, Request request, Throwable ca...
method onSuccess (line 188) | public void onSuccess(RtspClient client, Request request, Response res...
FILE: RtspCamera/src/de/kp/net/rtsp/client/api/EntityMessage.java
type EntityMessage (line 26) | public interface EntityMessage {
method getContent (line 28) | public RtspContent getContent();
method setContent (line 30) | public void setContent(RtspContent content);
method getMessage (line 32) | public Message getMessage();
method getBytes (line 34) | public byte[] getBytes() throws Exception;
method isEntity (line 36) | public boolean isEntity();
FILE: RtspCamera/src/de/kp/net/rtsp/client/api/Message.java
type Message (line 27) | public interface Message {
method getLine (line 39) | public String getLine();
method getHeader (line 49) | public RtspHeader getHeader(String name) throws Exception;
method getCSeq (line 56) | public CSeqHeader getCSeq();
method getHeaders (line 62) | public RtspHeader[] getHeaders();
method addHeader (line 70) | public void addHeader(RtspHeader header);
method getBytes (line 76) | public byte[] getBytes() throws Exception;
method getEntityMessage (line 82) | public EntityMessage getEntityMessage();
method setEntityMessage (line 90) | public Message setEntityMessage(EntityMessage entity);
FILE: RtspCamera/src/de/kp/net/rtsp/client/api/MessageFactory.java
type MessageFactory (line 30) | public interface MessageFactory {
method incomingMessage (line 32) | public void incomingMessage(MessageBuffer message) throws Exception;
method outgoingRequest (line 34) | public Request outgoingRequest(String uri, Request.Method method, int ...
method outgoingRequest (line 36) | public Request outgoingRequest(RtspContent body, String uri, Request.M...
method outgoingResponse (line 38) | public Response outgoingResponse(int code, String message, int cseq, R...
method outgoingResponse (line 40) | public Response outgoingResponse(RtspContent body, int code, String te...
FILE: RtspCamera/src/de/kp/net/rtsp/client/api/Request.java
type Request (line 28) | public interface Request extends Message {
type Method (line 30) | enum Method {
method setLine (line 34) | public void setLine(Method method, String uri) throws URISyntaxException;
method getMethod (line 36) | public Method getMethod();
method getURI (line 38) | public String getURI();
method handleResponse (line 40) | public void handleResponse(RtspClient client, Response response);
FILE: RtspCamera/src/de/kp/net/rtsp/client/api/RequestListener.java
type RequestListener (line 26) | public interface RequestListener {
method onDescriptor (line 28) | public void onDescriptor(RtspClient client, String descriptor);
method onError (line 30) | public void onError(RtspClient client, Throwable error);
method onFailure (line 32) | public void onFailure(RtspClient client, Request request, Throwable ca...
method onSuccess (line 34) | public void onSuccess(RtspClient client, Request request, Response res...
FILE: RtspCamera/src/de/kp/net/rtsp/client/api/Response.java
type Response (line 24) | public interface Response extends Message {
method setLine (line 26) | pub
Condensed preview — 388 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (4,000K chars).
[
{
"path": "README.md",
"chars": 487,
"preview": "# RTSP-Camera-for-Android\nAndroid based RTSP Server which is able to serve live camera view to multiple RTSP clients, su"
},
{
"path": "RtspCamera/.classpath",
"chars": 380,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\r\n<classpath>\r\n\t<classpathentry kind=\"src\" path=\"src\"/>\r\n\t<classpathentry kind=\"sr"
},
{
"path": "RtspCamera/.gitignore",
"chars": 10,
"preview": "/bin\n/gen\n"
},
{
"path": "RtspCamera/.project",
"chars": 846,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\r\n<projectDescription>\r\n\t<name>RtspCamera</name>\r\n\t<comment></comment>\r\n\t<projects"
},
{
"path": "RtspCamera/.settings/org.jboss.ide.eclipse.as.core.prefs",
"chars": 95,
"preview": "eclipse.preferences.version=1\r\norg.jboss.ide.eclipse.as.core.singledeployable.deployableList=\r\n"
},
{
"path": "RtspCamera/AndroidManifest.xml",
"chars": 1186,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n package="
},
{
"path": "RtspCamera/gpl.txt",
"chars": 35147,
"preview": " GNU GENERAL PUBLIC LICENSE\n Version 3, 29 June 2007\n\n Copyright (C) 2007 Free "
},
{
"path": "RtspCamera/jni/Android.mk",
"chars": 37,
"preview": "include $(call all-subdir-makefiles)\n"
},
{
"path": "RtspCamera/jni/Application.mk",
"chars": 147,
"preview": "APP_PROJECT_PATH := /arwa/git/RTSP-Camera-for-Android/RtspCamera\nAPP_MODULES := libH264Decoder libH264Encoder libH2"
},
{
"path": "RtspCamera/jni/avc_h264/Android.mk",
"chars": 63,
"preview": "AVC_ROOT:= $(call my-dir)\ninclude $(call all-subdir-makefiles)\n"
},
{
"path": "RtspCamera/jni/avc_h264/common/include/avcapi_common.h",
"chars": 8679,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/common/include/avcint_common.h",
"chars": 34990,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/common/include/avclib_common.h",
"chars": 22502,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/common/src/deblock.cpp",
"chars": 57517,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/common/src/dpb.cpp",
"chars": 24296,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/common/src/fmo.cpp",
"chars": 9087,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/common/src/mb_access.cpp",
"chars": 14823,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/common/src/reflist.cpp",
"chars": 16755,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/Android.mk",
"chars": 1933,
"preview": "#\n# Copyright (C) 2008 The Android Open Source Project\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\""
},
{
"path": "RtspCamera/jni/avc_h264/dec/include/avcdec_api.h",
"chars": 8954,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/include/pvavcdecoder.h",
"chars": 2041,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/include/pvavcdecoder_factory.h",
"chars": 1727,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/include/pvavcdecoderinterface.h",
"chars": 1975,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/3GPVideoParser.cpp",
"chars": 9507,
"preview": "/*\n * Copyright (C) 2009 OrangeLabs\n * 3GPVideoParser.cpp\n *\n * Created on: 12 août 2009\n * Author: rglt1266\n */\n#"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/3GPVideoParser.h",
"chars": 1563,
"preview": "/*\n * Copyright (C) 2009 OrangeLabs\n * 3GPVideoParser.h\n *\n * Created on: 12 août 2009\n * Author: rglt1266\n */\n\n#i"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/NativeH264Decoder.cpp",
"chars": 9034,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 2009 OrangeLabs\n *\n * Author: Ale"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/NativeH264Decoder.h",
"chars": 3496,
"preview": "/* DO NOT EDIT THIS FILE - it is machine generated */\n#include <jni.h>\n/* Header for class com_orangelabs_rcs_core_ims_p"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/avc_bitstream.cpp",
"chars": 10618,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/avcdec_api.cpp",
"chars": 42740,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/avcdec_bitstream.h",
"chars": 4698,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/avcdec_int.h",
"chars": 3263,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/avcdec_lib.h",
"chars": 23489,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/header.cpp",
"chars": 48619,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/itrans.cpp",
"chars": 8421,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/pred_inter.cpp",
"chars": 83009,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/pred_intra.cpp",
"chars": 49504,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/pvavcdecoder.cpp",
"chars": 7424,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/pvavcdecoder_factory.cpp",
"chars": 1684,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/residual.cpp",
"chars": 15680,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/slice.cpp",
"chars": 26065,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/vlc.cpp",
"chars": 25948,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/yuv2rgb.cpp",
"chars": 892,
"preview": "/*\n * yuv2rgb.cpp\n *\n * Created on: 29 juil. 2009\n * Author: rglt1266\n */\n#include <stdio.h>\n#include \"yuv2rgb.h\"\n"
},
{
"path": "RtspCamera/jni/avc_h264/dec/src/yuv2rgb.h",
"chars": 228,
"preview": "/*\n * yuv2rgb.h\n *\n * Created on: 29 juil. 2009\n * Author: rglt1266\n */\n\n#include \"oscl_types.h\"\n\n#ifndef YUV2RGB_"
},
{
"path": "RtspCamera/jni/avc_h264/enc/Android.mk",
"chars": 2017,
"preview": "#\n# Copyright (C) 2008 The Android Open Source Project\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\""
},
{
"path": "RtspCamera/jni/avc_h264/enc/include/pvavcencoder.h",
"chars": 4166,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/include/pvavcencoder_factory.h",
"chars": 1723,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/include/pvavcencoderinterface.h",
"chars": 15219,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/NativeH264Encoder.cpp",
"chars": 7945,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 2009 OrangeLabs\n *\n * Author: Ale"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/NativeH264Encoder.cpp__orig",
"chars": 6607,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 2009 OrangeLabs\n *\n * Author: Ale"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/NativeH264Encoder.h",
"chars": 1588,
"preview": "/* DO NOT EDIT THIS FILE - it is machine generated */\n#include <jni.h>\n/* Header for class com_orangelabs_rcs_core_ims_p"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/avcenc_api.cpp",
"chars": 39026,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/avcenc_api.h",
"chars": 15752,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/avcenc_int.h",
"chars": 17887,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/avcenc_lib.h",
"chars": 44107,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/bitstream_io.cpp",
"chars": 12731,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/block.cpp",
"chars": 36452,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/findhalfpel.cpp",
"chars": 17327,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/header.cpp",
"chars": 33501,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/init.cpp",
"chars": 36712,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/intra_est.cpp",
"chars": 60986,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/motion_comp.cpp",
"chars": 77070,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/motion_est.cpp",
"chars": 56332,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/pvavcencoder.cpp",
"chars": 27384,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/pvavcencoder_factory.cpp",
"chars": 1677,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/rate_control.cpp",
"chars": 37204,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/residual.cpp",
"chars": 10755,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/sad.cpp",
"chars": 8425,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/sad_halfpel.cpp",
"chars": 16439,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/sad_halfpel_inline.h",
"chars": 2446,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/sad_inline.h",
"chars": 13407,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/sad_mb_offset.h",
"chars": 8978,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/slice.cpp",
"chars": 32710,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/enc/src/vlc_encode.cpp",
"chars": 10274,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_base.h",
"chars": 11437,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_base_macros.h",
"chars": 927,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_config.h",
"chars": 989,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_dll.h",
"chars": 1154,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_error.h",
"chars": 864,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_error_codes.h",
"chars": 1609,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_exception.h",
"chars": 856,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_math.h",
"chars": 1084,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_mem.h",
"chars": 1718,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_string.h",
"chars": 1507,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/oscl_types.h",
"chars": 2708,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/avc_h264/oscl/osclconfig_compiler_warnings.h",
"chars": 1691,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/Android.mk",
"chars": 37,
"preview": "include $(call all-subdir-makefiles)\n"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/Android.mk",
"chars": 2333,
"preview": "#\n# Copyright (C) 2008 The Android Open Source Project\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\""
},
{
"path": "RtspCamera/jni/m4v_h263/dec/include/mp4dec_api.h",
"chars": 6602,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/include/pvm4vdecoder.h",
"chars": 7491,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/include/pvm4vdecoder_dpi.h",
"chars": 7140,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/include/pvm4vdecoder_factory.h",
"chars": 1735,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/include/pvvideodecoderinterface.h",
"chars": 3475,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/include/visual_header.h",
"chars": 1572,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_base.h",
"chars": 11437,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_base_macros.h",
"chars": 927,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_config.h",
"chars": 989,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_dll.h",
"chars": 1154,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_error.h",
"chars": 864,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_error_codes.h",
"chars": 1609,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_exception.h",
"chars": 856,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_math.h",
"chars": 1084,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_mem.h",
"chars": 1718,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/oscl_types.h",
"chars": 2668,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/oscl/osclconfig_compiler_warnings.h",
"chars": 1691,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/3GPVideoParser.cpp",
"chars": 9507,
"preview": "/*\n * Copyright (C) 2009 OrangeLabs\n * 3GPVideoParser.cpp\n *\n * Created on: 12 août 2009\n * Author: rglt1266\n */\n#"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/3GPVideoParser.h",
"chars": 1563,
"preview": "/*\n * Copyright (C) 2009 OrangeLabs\n * 3GPVideoParser.h\n *\n * Created on: 12 août 2009\n * Author: rglt1266\n */\n\n#i"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/adaptive_smooth_no_mmx.cpp",
"chars": 17177,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/bitstream.cpp",
"chars": 34210,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/bitstream.h",
"chars": 5701,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/block_idct.cpp",
"chars": 30688,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/cal_dc_scaler.cpp",
"chars": 7080,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/chv_filter.cpp",
"chars": 32264,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/chvr_filter.cpp",
"chars": 28899,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_decoder_NativeH263Decoder.cpp",
"chars": 9789,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 2009 OrangeLabs\n *\n * Author: Ale"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_decoder_NativeH263Decoder.h",
"chars": 3649,
"preview": "/* DO NOT EDIT THIS FILE - it is machine generated */\n#include <jni.h>\n/* Header for class com_orangelabs_rcs_core_ims_p"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/combined_decode.cpp",
"chars": 29137,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/conceal.cpp",
"chars": 6264,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/datapart_decode.cpp",
"chars": 27798,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/dcac_prediction.cpp",
"chars": 13072,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/dec_pred_intra_dc.cpp",
"chars": 2477,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/deringing_chroma.cpp",
"chars": 7414,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/deringing_luma.cpp",
"chars": 8461,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/find_min_max.cpp",
"chars": 6927,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/get_pred_adv_b_add.cpp",
"chars": 43157,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/get_pred_outside.cpp",
"chars": 18784,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/idct.cpp",
"chars": 18703,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/idct.h",
"chars": 4432,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/idct_vca.cpp",
"chars": 16469,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/max_level.h",
"chars": 5120,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/mb_motion_comp.cpp",
"chars": 20437,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/mb_utils.cpp",
"chars": 3106,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/mbtype_mode.h",
"chars": 1508,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/motion_comp.h",
"chars": 4166,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/mp4dec_lib.h",
"chars": 13963,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/mp4def.h",
"chars": 5440,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/mp4lib_int.h",
"chars": 11966,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/packet_util.cpp",
"chars": 8076,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/post_filter.cpp",
"chars": 18731,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/post_proc.h",
"chars": 2986,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/post_proc_semaphore.cpp",
"chars": 10323,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/pp_semaphore_chroma_inter.cpp",
"chars": 9577,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/pp_semaphore_luma.cpp",
"chars": 14190,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/pvdec_api.cpp",
"chars": 69158,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/pvm4vdecoder.cpp",
"chars": 6514,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/pvm4vdecoder_factory.cpp",
"chars": 1690,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/scaling.h",
"chars": 1879,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/scaling_tab.cpp",
"chars": 3886,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/vlc_dec_tab.h",
"chars": 7289,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/vlc_decode.cpp",
"chars": 46076,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/vlc_decode.h",
"chars": 4751,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/vlc_dequant.cpp",
"chars": 33987,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/vlc_tab.cpp",
"chars": 35784,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/vop.cpp",
"chars": 52887,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/yuv2rgb.cpp",
"chars": 902,
"preview": "/*\n * yuv2rgb.cpp\n *\n * Created on: 29 juil. 2009\n * Author: rglt1266\n */\n#include <stdio.h>\n#include \"yuv2rgb.h\"\n"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/yuv2rgb.h",
"chars": 227,
"preview": "/*\n * yuv2rgb.h\n *\n * Created on: 29 juil. 2009\n * Author: rglt1266\n */\n\n#include \"oscl_types.h\"\n\n#ifndef YUV2RGB_"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/zigzag.h",
"chars": 2742,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/dec/src/zigzag_tab.cpp",
"chars": 4118,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/Android.mk",
"chars": 1904,
"preview": "#\n# Copyright (C) 2008 The Android Open Source Project\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\""
},
{
"path": "RtspCamera/jni/m4v_h263/enc/include/cvei.h",
"chars": 17994,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/include/mp4enc_api.h",
"chars": 23143,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/include/pvm4vencoder.h",
"chars": 6817,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_base.h",
"chars": 11437,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_base_macros.h",
"chars": 927,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_config.h",
"chars": 989,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_dll.h",
"chars": 1154,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_error.h",
"chars": 864,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_error_codes.h",
"chars": 1609,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_exception.h",
"chars": 856,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_math.h",
"chars": 1084,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_mem.h",
"chars": 1718,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/oscl_types.h",
"chars": 2668,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/oscl/osclconfig_compiler_warnings.h",
"chars": 1691,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/bitstream_io.cpp",
"chars": 31616,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/bitstream_io.h",
"chars": 2494,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_encoder_NativeH263Encoder.cpp",
"chars": 11675,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 2009 OrangeLabs\n *\n * Author: Ale"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/com_orangelabs_rcs_core_ims_protocol_rtp_codec_video_h263_encoder_NativeH263Encoder.h",
"chars": 1459,
"preview": "/* DO NOT EDIT THIS FILE - it is machine generated */\n#include <jni.h>\n/* Header for class com_orangelabs_rcs_core_ims_p"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/combined_encode.cpp",
"chars": 27710,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/datapart_encode.cpp",
"chars": 18817,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/dct.cpp",
"chars": 37343,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/dct.h",
"chars": 8408,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/dct_inline.h",
"chars": 8592,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/fastcodemb.cpp",
"chars": 23878,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/fastcodemb.h",
"chars": 4586,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/fastidct.cpp",
"chars": 53479,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/fastquant.cpp",
"chars": 29794,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/fastquant_inline.h",
"chars": 14819,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/findhalfpel.cpp",
"chars": 9097,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/m4venc_oscl.h",
"chars": 1760,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/me_utils.cpp",
"chars": 9877,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/motion_comp.cpp",
"chars": 66713,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/motion_est.cpp",
"chars": 57219,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/mp4def.h",
"chars": 5811,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/mp4enc_api.cpp",
"chars": 129284,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/mp4enc_api.cpp.original",
"chars": 128872,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/mp4enc_lib.h",
"chars": 10639,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/mp4lib_int.h",
"chars": 19802,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/pvm4vencoder.cpp",
"chars": 43529,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2010 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/rate_control.cpp",
"chars": 39001,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/rate_control.h",
"chars": 4049,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
},
{
"path": "RtspCamera/jni/m4v_h263/enc/src/sad.cpp",
"chars": 11816,
"preview": "/* ------------------------------------------------------------------\n * Copyright (C) 1998-2009 PacketVideo\n *\n * Licen"
}
]
// ... and 188 more files (download for full content)
About this extraction
This page contains the full source code of the spex66/RTSP-Camera-for-Android GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 388 files (3.6 MB), approximately 975.0k tokens, and a symbol index with 2439 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — a free GitHub-repo-to-text converter for AI. Built by Nikandr Surkov.