view VideoPlayer.h @ 2344:13e15d77b0f8

Moving functions from unsorted_subs.h pt6
author Grumpy7
date Sun, 06 Apr 2014 21:27:02 +0200

#pragma once
#include "OSWindow.h"
#include "Texture.h"





#pragma pack(push, 1)



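// Partial layouts of the Bink (RAD Game Tools) movie handle as returned by two
// different binkw32.dll versions; only the fields the engine reads are mapped,
// the rest are presumably unknowns from reverse engineering.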
struct _BINK
{
  int uWidth;
  int uHeight;
};

struct _BINK_1_5_21_0: public _BINK
{
  int uNumFrames;
  int uCurrentFrame;
  int Data1;
  int uFrameRate;
  int field_18[3];
  int uFlags;
};

struct _BINK_3_0_0_0: public _BINK
{
  int unk_0;
  int unk_1;
  int uNumFrames;
  int uCurrentFrame;
  int _unk2;
  int _unk_[10];
};


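// Bink blit-buffer descriptor: describes the DirectDraw surface the decoded
// frame is blitted to; most field names are reverse-engineered guesses.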
struct _BINKBUF
{
  int uWidth;
  int uHeight;
  int field_8;
  int field_C;
  int uBinkDDSurfaceType;
  void *pDDrawSurfaceData;
  int uDDrawSurfacePitch;
  int field_1C;
  int field_20;
  int target_width;
  int target_height;
  int field_2C;
  int field_30;
  int field_34;
  int field_38;
  int field_3C;
  int field_40;
  int field_44;
  struct IDirectDrawSurface *pTargetDDrawSurface;
  int field_4C;
  int uRectX;
  int uRectY;
  HWND hWnd;
  int field_5C;
  float field_60;
  float field_64;
  int field_68;
  int field_6C;
  void *pDDrawSurfaceData_;
  int field_74;
  int field_78;
};
struct _BINKBUF_1_5_21_0: public _BINKBUF
{
  int field_7C;
  int field_80;
  int field_84;
  int field_88;
  int field_8C;
  int field_90;
  int field_94;
  int field_98;
  int field_9C;
  int field_A0;
};

struct _BINKBUF_3_0_0_0: public _BINKBUF
{
};
#pragma pack(pop)




#pragma pack(push, 1)
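// Index entry of the game's *.vid movie archives (presumably MIGHT.VID and
// MAGIC.VID, opened into hMightVid/hMagicVid below): a fixed-size movie name
// followed by the offset of its data within the archive.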
struct MovieHeader
{
  char pVideoName[40];
  unsigned int uFileOffset;
};
#pragma pack(pop)





void ShowMM7IntroVideo_and_LoadingScreen();

extern "C"
{
#include "lib/libavcodec/avcodec.h"
#include "lib/libavformat/avformat.h"
#include "lib/libavutil/avutil.h"
#include "lib/libavutil/imgutils.h"
#include "lib/libswscale/swscale.h"
#include "lib/libswresample/swresample.h"
#include "lib/libavutil/opt.h"
	//#include "libavutil/samplefmt.h"
}
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")
#pragma comment(lib, "swresample.lib")

#include "lib/OpenAL/al.h"
#include "lib/OpenAL/alc.h"
#pragma comment(lib, "OpenAL32.lib")



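// Streams raw PCM through OpenAL: one source plays chunks queued from a fixed
// ring of MAX_SAMPLES_BUFFERS buffers, one chunk per decoded audio frame.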
template<int MAX_SAMPLES_BUFFERS>
class OpenALSoundProviderGeneric
{
public:
	inline OpenALSoundProviderGeneric()
	{
		this->device = nullptr;
		this->context = nullptr;
		this->samples_current_buffer = 0;

		this->samples_source_id = -1;
		for (int i = 0; i < MAX_SAMPLES_BUFFERS; ++i)
			samples_buffer_id[i] = -1;
	}

	inline bool Initialize()
	{
		// Device names come back as a list of null-separated strings terminated
		// by a double null; advance past each name and its terminator.
		auto device_names = alcGetString(nullptr, ALC_ALL_DEVICES_SPECIFIER);
		if (!device_names)
			device_names = alcGetString(nullptr, ALC_DEVICE_SPECIFIER);
		if (device_names)
		{
			for (auto device_name = device_names; device_name[0]; device_name += strlen(device_name) + 1)
			{
				continue;
			}
		}

		device = alcOpenDevice(nullptr);
		if (!device || CheckError())
			return false;

		context = alcCreateContext(device, nullptr);
		if (!context || CheckError())
			return Release(), false;

		alcMakeContextCurrent(context);

		bool eax2 = alIsExtensionPresent("EAX2.0");
		bool eax3 = alIsExtensionPresent("EAX3.0");
		bool eax4 = alIsExtensionPresent("EAX4.0");
		bool eax5 = alIsExtensionPresent("EAX5.0");

		auto vendor = alGetString(AL_VENDOR);
		auto version = alGetString(AL_VERSION);
		auto extensions = alcGetString(device, ALC_EXTENSIONS);

		alGenBuffers(MAX_SAMPLES_BUFFERS, samples_buffer_id);
		if (CheckError())
			return Release(), false;

		alGenSources(1, &samples_source_id);
		if (CheckError())
			return Release(), false;

		float sound_pos[] = { 0.0f, 0.0f, 0.0f };
		alSourcefv(samples_source_id, AL_POSITION, sound_pos);
		alSourcei(samples_source_id, AL_LOOPING, AL_FALSE);

		return true;
	}

	void Release()
	{
		if (samples_source_id != -1)
		{
			alDeleteSources(1, &samples_source_id);
			samples_source_id = -1;
		}

		for (int i = 0; i < MAX_SAMPLES_BUFFERS; ++i)
		{
			if (samples_buffer_id[i] != -1)
				alDeleteBuffers(1, &samples_buffer_id[i]);
			samples_buffer_id[i] = -1;
		}

		alcMakeContextCurrent(nullptr);
		if (context)
		{
			alcDestroyContext(context);
			context = nullptr;
		}
		if (device)
		{
			alcCloseDevice(device);
			device = nullptr;
		}
	}

	void PlaySample(int num_channels, int sample_rate, int num_samples, void *samples)
	{
		//char msg[256];sprintf(msg, "chan %u rate %5u num %5u ptr %p\n", num_channels, sample_rate, num_samples, samples);
		//log(msg);
		ALenum sound_format;
		switch (num_channels)
		{
		case 1: sound_format = AL_FORMAT_MONO16;    break;
		case 2: sound_format = AL_FORMAT_STEREO16;  break;
		default:
			// Multichannel layouts need the AL_EXT_MCFORMATS extension; leave
			// sound_format at 0 (AL_NONE) if the channel count is unsupported.
			sound_format = 0;
			if (alIsExtensionPresent("AL_EXT_MCFORMATS"))
			{
				switch (num_channels)
				{
				case 4: sound_format = alGetEnumValue("AL_FORMAT_QUAD16");  break;
				case 6: sound_format = alGetEnumValue("AL_FORMAT_51CHN16"); break;
				case 7: sound_format = alGetEnumValue("AL_FORMAT_61CHN16"); break;
				case 8: sound_format = alGetEnumValue("AL_FORMAT_71CHN16"); break;
				}
			}
			if (!sound_format)
				__debugbreak();
			break;
		}

		float listener_pos[] = { 0.0f, 0.0f, 0.0f };
		float listener_vel[] = { 0.0f, 0.0f, 0.0f };
		float listener_orientation[] = { 0.0f, 0.0f, -1.0f, // direction
			0.0f, 1.0f, 0.0f }; // up vector
		alListenerfv(AL_POSITION, listener_pos);
		alListenerfv(AL_VELOCITY, listener_vel);
		alListenerfv(AL_ORIENTATION, listener_orientation);

		unsigned int *next_buffer = samples_buffer_id + samples_current_buffer;

		// 16-bit interleaved PCM: buffer size is samples * channels * 2 bytes
		alBufferData(*next_buffer, sound_format, samples, num_samples * num_channels * sizeof(__int16), sample_rate);
		CheckError();


		int num_processed_buffers;
		alGetSourcei(samples_source_id, AL_BUFFERS_PROCESSED, &num_processed_buffers);
		/*while (num_processed_buffers)
		{
			unsigned int processed_buffers_id[4];
			alSourceUnqueueBuffers(samples_source_id, min(4, num_processed_buffers), processed_buffers_id);
			CheckError();
			alGetSourcei(samples_source_id, AL_BUFFERS_PROCESSED, &num_processed_buffers);
		}*/
		// Unqueue buffers the source has finished with so the fixed pool can be
		// reused; the buffer names stay in samples_buffer_id[] and are only
		// deleted in Release().
		for (int i = 0; i < num_processed_buffers; ++i)
		{
			unsigned int processed_buffer_id;
			alSourceUnqueueBuffers(samples_source_id, 1, &processed_buffer_id);
			CheckError();
		}

		int num_queued_buffers;
		alGetSourcei(samples_source_id, AL_BUFFERS_QUEUED, &num_queued_buffers);
		if (num_queued_buffers >= MAX_SAMPLES_BUFFERS)
		{
			__debugbreak();
		}

		alSourceQueueBuffers(samples_source_id, 1, next_buffer);
		CheckError();

		int status;
		alGetSourcei(samples_source_id, AL_SOURCE_STATE, &status);
		if (status != AL_PLAYING)
		{
			alSourcePlay(samples_source_id);
			CheckError();
		}

		if (++samples_current_buffer >= MAX_SAMPLES_BUFFERS)
			samples_current_buffer = 0;
	}



protected:
	ALCdevice    *device;
	ALCcontext   *context;
	unsigned int  samples_buffer_id[MAX_SAMPLES_BUFFERS];
	unsigned int  samples_source_id;
	int           samples_current_buffer;


	bool CheckError()
	{
		// al* calls report through alGetError(), alc* calls through alcGetError();
		// check both so failures from buffer/source calls are not silently dropped.
		ALenum  al_code  = alGetError();
		ALCenum alc_code = alcGetError(device);
		if (al_code != AL_NO_ERROR || alc_code != ALC_NO_ERROR)
		{
			DWORD w;
			const char *message = (al_code != AL_NO_ERROR) ? alGetString(al_code)
			                                               : alcGetString(device, alc_code);
			WriteConsoleA(GetStdHandle(STD_OUTPUT_HANDLE), message, lstrlenA(message), &w, nullptr);
			return true;
		}
		return false;
	}
};
typedef OpenALSoundProviderGeneric<64> OpenALSoundProvider;
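// Minimal usage sketch (hypothetical values; assumes interleaved signed 16-bit PCM):
//   OpenALSoundProvider provider;
//   if (provider.Initialize())
//   {
//     provider.PlaySample(2, 22050, num_samples, pcm_samples); // one call per decoded frame
//     // ...
//     provider.Release();
//   }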



void av_logger(void *, int, const char *format, va_list args);



#include <memory>
using std::shared_ptr;
using std::make_shared;

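// Base class for a single demuxed packet: keeps the AVPacket and the codec
// context of its stream; Decode() turns it into presentable data (pixels or PCM).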
class MultimediaFrame
{
public:
	typedef shared_ptr<MultimediaFrame> Ptr;
	inline MultimediaFrame(AVMediaType type, AVPacket *packet, AVCodecContext *dec_ctx)
	{
		this->type = type;
		this->f = nullptr;
		this->p = packet;
		this->dec_ctx = dec_ctx;
	}
	virtual ~MultimediaFrame() { return; }


	AVMediaType  Type() const { return type; }
	AVFrame     *GetAVFrame() { return f; }

	virtual int   Decode() = 0;
	virtual void *GetData() = 0;
	virtual int   GetDataPitch() = 0;


protected:
	AVMediaType      type;
	AVFrame         *f;
	AVPacket        *p;
	AVCodecContext  *dec_ctx;
};



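// Video packet: decoded with avcodec_decode_video2() and converted/rescaled to
// RGB32 at the requested output size via swscale.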
class MultimediaVideoFrame : public MultimediaFrame
{
public:
	inline MultimediaVideoFrame(AVMediaType type, AVPacket *packet, AVCodecContext *dec_ctx, int width, int height) :
		MultimediaFrame(type, packet, dec_ctx)
	{
		this->width = width;
		this->height = height;
		this->rescaled_data[0] = nullptr;
		this->rescaled_linesize[0] = 0;
	}
	virtual ~MultimediaVideoFrame()
	{
		av_freep(&rescaled_data[0]); // image buffer allocated by av_image_alloc()
		av_frame_free(&f);
	}

	int Decode() override
	{
		f = avcodec_alloc_frame();
		avcodec_get_frame_defaults(f);

		int done = 0;
		do
		{
			int ret;
			if ((ret = avcodec_decode_video2(dec_ctx, f, &done, p)) < 0)
				return ret;
		} while (!done);
		if (Rescale(f, width, height, AV_PIX_FMT_RGB32, rescaled_data, rescaled_linesize))
			return 0;
		else return -1;
	}

	virtual void *GetData()       { return rescaled_data[0]; }
	virtual int   GetDataPitch()  { return rescaled_linesize[0]; }

protected:
	int      width;
	int      height;
	uint8_t *rescaled_data[8];
	int      rescaled_linesize[8];

	bool Rescale(AVFrame *frame, int dst_width, int dst_height, AVPixelFormat format, uint8_t **out_data, int *out_linesize)
	{
		if (av_image_alloc(out_data, out_linesize, dst_width, dst_height, format, 1) < 0)
			return false;

		SwsContext *converter = sws_getContext(frame->width, frame->height, (AVPixelFormat)frame->format,
			dst_width, dst_height, format,
			SWS_BICUBIC, nullptr, nullptr, nullptr);
		if (!converter)
		{
			av_freep(&out_data[0]);
			return false;
		}
		sws_scale(converter, frame->data, frame->linesize, 0, frame->height, out_data, out_linesize);
		sws_freeContext(converter);

		return true;
	}
};



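// Audio packet: decoded with avcodec_decode_audio4() and resampled to
// interleaved signed 16-bit via swresample so it can be handed to OpenAL.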
class MultimediaAudioFrame : public MultimediaFrame
{
public:
	inline MultimediaAudioFrame(AVMediaType type, AVPacket *packet, AVCodecContext *dec_ctx) :
		MultimediaFrame(type, packet, dec_ctx)
	{
		this->resampled_data = nullptr;
	}
	virtual ~MultimediaAudioFrame()
	{
		av_free(resampled_data);
		av_frame_free(&f);
	}

	int Decode() override
	{
		f = avcodec_alloc_frame();
		avcodec_get_frame_defaults(f);

		int done = 0;
		do
		{
			int ret;
			if ((ret = avcodec_decode_audio4(dec_ctx, f, &done, p)) < 0)
				return ret;
		} while (!done);
		if (Resample(f, f->channel_layout, f->sample_rate,
			f->channel_layout, f->sample_rate, AV_SAMPLE_FMT_S16, &resampled_data))
			return 0;
		else return -1;
	}

	virtual void *GetData()       { return resampled_data; }
	virtual int   GetDataPitch()  { return 2 * f->nb_samples; }

protected:
	uint8_t *resampled_data;

	bool Resample(AVFrame *frame,
		int64_t src_channel_layout, int src_sample_rate,
		int64_t dst_channel_layout, int dst_sample_rate, AVSampleFormat dst_format, uint8_t **out_data)
	{
		SwrContext *converter = swr_alloc();

		av_opt_set_int(converter, "in_channel_layout", src_channel_layout, 0);
		av_opt_set_int(converter, "in_sample_rate", src_sample_rate, 0);
		av_opt_set_sample_fmt(converter, "in_sample_fmt", (AVSampleFormat)frame->format, 0);

		av_opt_set_int(converter, "out_channel_layout", dst_channel_layout, 0);
		av_opt_set_int(converter, "out_sample_rate", dst_sample_rate, 0);
		av_opt_set_sample_fmt(converter, "out_sample_fmt", dst_format, 0);

		if (swr_init(converter) < 0)
			return false;

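		// A frame of nb_samples at src_sample_rate becomes roughly
		// ceil(nb_samples * dst_sample_rate / src_sample_rate) samples after resampling.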
		int dst_nb_samples;
		int max_dst_nb_samples = dst_nb_samples = av_rescale_rnd(frame->nb_samples, dst_sample_rate, src_sample_rate, AV_ROUND_UP);

		uint8_t **dst_data;
		int       dst_linesize;
		int dst_nb_channels = av_get_channel_layout_nb_channels(dst_channel_layout);
		if (av_samples_alloc_array_and_samples(&dst_data, &dst_linesize, dst_nb_channels, dst_nb_samples, dst_format, 0) < 0)
		{
			swr_free(&converter);
			return false;
		}

		dst_nb_samples = av_rescale_rnd(swr_get_delay(converter, src_sample_rate) + frame->nb_samples, dst_sample_rate, src_sample_rate, AV_ROUND_UP);
		if (dst_nb_samples > max_dst_nb_samples)
		{
			av_free(dst_data[0]);
			if (av_samples_alloc(dst_data, &dst_linesize, dst_nb_channels, dst_nb_samples, dst_format, 1) < 0)
			{
				swr_free(&converter);
				return false;
			}
			max_dst_nb_samples = dst_nb_samples;
		}


		if (swr_convert(converter, dst_data, dst_nb_samples, (const uint8_t **)frame->data, frame->nb_samples) < 0)
		{
			av_free(dst_data[0]);
			av_freep(&dst_data);
			swr_free(&converter);
			return false;
		}

		swr_free(&converter);
		*out_data = dst_data[0]; // caller owns the samples; freed in ~MultimediaAudioFrame()
		av_freep(&dst_data);     // free only the pointer array, not the sample buffer
		return true;
	}
};

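// Custom AVIO callbacks used when playing a movie straight from an already-open
// archive handle (see MovieCached::LoadFromLOD); declared here, defined elsewhere.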
int readFunction(void* opaque, uint8_t* buf, int buf_size);

int64_t seekFunction(void* opaque, int64_t offset, int whence);

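// Wraps libavformat demuxing of a single movie: opens the best video and audio
// streams and hands out packets one at a time as MultimediaFrame objects.
// NUM_PRECACHED_FRAMES is currently unused (the frame cache is commented out).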
template<int NUM_PRECACHED_FRAMES>
class MovieCached
{
public:
	bool Stopped() { return stopped; }
	int GetWidth() { return width; }
	int GetHeight() { return height; }
	inline ~MovieCached()
	{
		Release();
	}
protected:
	friend class MultimediaPlayer;
	inline MovieCached(OpenALSoundProvider *sound_provider)
	{
		this->format_ctx = nullptr;
		this->sound_provider = sound_provider;
		this->stopped = false;

		this->video_stream_idx = -1;
		this->video_stream = nullptr;
		this->video_stream_dec = nullptr;
		this->video_stream_dec_ctx = nullptr;

		this->audio_stream_idx = -1;
		this->audio_stream = nullptr;
		this->audio_stream_dec = nullptr;
		this->audio_stream_dec_ctx = nullptr;

		packet = nullptr;
		ioBuffer = nullptr;
		format_ctx = nullptr;
		avioContext = nullptr;
	}

	bool LoadFromLOD(HANDLE h, int readFunction(void*, uint8_t*, int), int64_t seekFunction(void*, int64_t, int), int width, int height)
	{
		if (!ioBuffer)
			ioBuffer = (unsigned char *)av_malloc(16384 + FF_INPUT_BUFFER_PADDING_SIZE); // can get av_free()ed by libav
		if (!avioContext)
			avioContext = avio_alloc_context(ioBuffer, 16384, 0, h, readFunction, nullptr, seekFunction);
		if (!format_ctx)
			format_ctx = avformat_alloc_context();
		format_ctx->pb = avioContext;
		return Load("dummyFilename", width, height);
	}

	bool Load(const char *video_filename, int width, int height)
	{
		this->width = width;
		this->height = height;
		
		if (avformat_open_input(&format_ctx, video_filename, nullptr, nullptr) >= 0)
		{
			if (avformat_find_stream_info(format_ctx, nullptr) >= 0)
			{
				av_dump_format(format_ctx, 0, video_filename, 0);
				
				video_stream_idx = OpenStream(AVMEDIA_TYPE_VIDEO, &video_stream, &video_stream_dec, &video_stream_dec_ctx);
				if (video_stream_idx < 0)
					return Release(), false;
				if (_stricmp("binkvideo", video_stream_dec->name) )//Ritor1: include
				{
					this->width = video_stream_dec_ctx->width;
					this->height = video_stream_dec_ctx->height;
				}

				audio_stream_idx = OpenStream(AVMEDIA_TYPE_AUDIO, &audio_stream, &audio_stream_dec, &audio_stream_dec_ctx);
				if (audio_stream_idx < 0)
					return Release(), false;

				strcpy(movie_name, video_filename);
				packet = new AVPacket;
				av_init_packet(packet);
				return true;
			}
		}
		return Release(), false;
	}

	bool Release()
	{
		if (packet)
		{
			av_free_packet(packet);
			delete packet;
			packet = nullptr;
		}

		if (video_stream_idx >= 0)
		{
			video_stream_idx = -1;
			video_stream = nullptr;
			video_stream_dec = nullptr;
			avcodec_close(video_stream_dec_ctx);
			video_stream_dec_ctx = nullptr;
		}

		if (audio_stream_idx >= 0)
		{
			audio_stream_idx = -1;
			audio_stream = nullptr;
			audio_stream_dec = nullptr;
			avcodec_close(audio_stream_dec_ctx);
			audio_stream_dec_ctx = nullptr;
		}
		if (avioContext)
		{
			av_free(avioContext);
			avioContext = nullptr;
		}
		if (format_ctx)
		{
			avformat_free_context(format_ctx);
			format_ctx = nullptr;
		}
		return true;
	}


	int OpenStream(AVMediaType type, AVStream **out_stream, AVCodec **out_dec, AVCodecContext **out_dec_ctx)
	{
		int stream_idx = av_find_best_stream(format_ctx, type, -1, -1, nullptr, 0);
		if (stream_idx < 0)
			return stream_idx;

		auto stream = format_ctx->streams[stream_idx];
		auto dec_ctx = stream->codec;
		auto dec = avcodec_find_decoder(dec_ctx->codec_id);
		if (dec)
		{
			if (avcodec_open2(dec_ctx, dec, nullptr) >= 0)
			{
				*out_stream = stream;
				*out_dec = dec;
				*out_dec_ctx = dec_ctx;
				return stream_idx;
			}
		}
		return -1;
	}

	MultimediaFrame::Ptr GetNextFrame()
	{
		packet->data = nullptr;
		packet->size = 0;

		volatile int got_frame = false;
		do
		{
			if (av_read_frame(format_ctx, packet) < 0)
			{
				stopped = true;
				return nullptr;
			}
		} while (packet->stream_index != video_stream_idx &&
			packet->stream_index != audio_stream_idx);

		if (packet->stream_index == video_stream_idx)
			return MultimediaFrame::Ptr(new MultimediaVideoFrame(AVMEDIA_TYPE_VIDEO, packet, video_stream_dec_ctx, width, height));
		else if (packet->stream_index == audio_stream_idx)
			return MultimediaFrame::Ptr(new MultimediaAudioFrame(AVMEDIA_TYPE_AUDIO, packet, audio_stream_dec_ctx));
		return nullptr;
	}



	char                 movie_name[256];
	int                  width;
	int                  height;
	bool                 stopped;
	AVFormatContext     *format_ctx;
	//AVFrame             *frame;
	AVPacket            *packet;
	//MultimediaFrame     *frames[NUM_PRECACHED_FRAMES];
	OpenALSoundProvider *sound_provider;

	int              video_stream_idx;
	AVStream        *video_stream;
	AVCodec         *video_stream_dec;
	AVCodecContext  *video_stream_dec_ctx;

	int              audio_stream_idx;
	AVStream        *audio_stream;
	AVCodec         *audio_stream_dec;
	AVCodecContext  *audio_stream_dec_ctx;
	unsigned char * ioBuffer;
	AVIOContext *avioContext;
};
typedef MovieCached<10> Movie;




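// Top-level ffmpeg/OpenAL movie player: Initialize() registers the codecs and
// creates the sound provider, LoadMovie()/LoadMovieFromLOD() open a Movie, and
// DoFrame() pumps it, playing audio and returning the next RGB32 video frame.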
class MultimediaPlayer
{
public:
	inline MultimediaPlayer()
	{
		sound_provider = nullptr;
		current_movie = nullptr;
		current_movie_width = current_movie_height = 0;
	}

	bool Initialize()
	{
		if (!libavcodec_initialized)
		{
			av_log_set_callback(Logger);
			avcodec_register_all();
			av_register_all();

			libavcodec_initialized = true;
		}

		sound_provider = new OpenALSoundProvider;
		sound_provider->Initialize();

		return true;
	}

	Movie *LoadMovieFromLOD(HANDLE h, int readFunction(void*, uint8_t*, int), int64_t seekFunction(void*, int64_t, int), int width, int height)
	{
		auto movie = new Movie(sound_provider);
		if (movie)
		{
			if (movie->LoadFromLOD(h, readFunction, seekFunction, width, height))
			{
				if (_stricmp("binkvideo", movie->video_stream_dec->name) )
				{
					current_movie_width = movie->video_stream_dec_ctx->width;
					current_movie_height = movie->video_stream_dec_ctx->height;
				}
				else
				{
					current_movie_width = width;
					current_movie_height = height;
				}
				return current_movie = movie;
			}
			delete movie;
		}
		return nullptr;
	}

	Movie *LoadMovie(const char *filename, int width, int height)
	{
		auto movie = new Movie(sound_provider);
		if (movie)
		{
			if (movie->Load(filename, width, height))
			{
				current_movie_width = width;
				current_movie_height = height;
				return current_movie = movie;
			}
			delete movie;
		}
		return nullptr;
	}

	inline char *DoFrame()
	{
		if (!current_movie)
			return nullptr;

		while (true)
		{
			auto frame = current_movie->GetNextFrame();
			if (!frame)
				return nullptr;

			if (frame->Type() == AVMEDIA_TYPE_AUDIO)
			{
				//continue;
//				uint8_t *data;
				if (frame->Decode() >= 0)
				{
					auto f = frame->GetAVFrame();
					sound_provider->PlaySample(f->channels, f->sample_rate, f->nb_samples, frame->GetData());
					Sleep(20);
					continue;
				}
			}
			else if (frame->Type() == AVMEDIA_TYPE_VIDEO)
			{
				if (frame->Decode() >= 0)
				{
					auto image = new char[current_movie_width * current_movie_height * 4];
					memcpy(image, frame->GetData(), current_movie_height * frame->GetDataPitch());

					return image;
				}
			}
			return nullptr;
		}
	}



protected:
	static void Logger(void *, int, const char *format, va_list args);

	OpenALSoundProvider *sound_provider;
	Movie               *current_movie;
	int                  current_movie_width;
	int                  current_movie_height;

	static bool libavcodec_initialized;
};
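// Hypothetical usage sketch (file name and blitting are illustrative):
//   MultimediaPlayer player;
//   player.Initialize();
//   if (Movie *movie = player.LoadMovie("intro.smk", 640, 480))
//   {
//     while (!movie->Stopped())
//     {
//       char *rgb32 = player.DoFrame(); // width * height * 4 bytes, caller owns it
//       if (!rgb32)
//         break;                        // end of stream or decode error
//       // ...blit rgb32 to the target surface, then release it...
//       delete[] rgb32;
//     }
//   }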






#pragma pack(push, 1)
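// Engine-facing movie player. The layout still mirrors the original Bink/Smacker
// based implementation (hence the Bink/Smack methods and surface fields), while
// actual playback goes through MultimediaPlayer / ffmpeg via pPlayer and pMovie.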
struct VideoPlayer
{
  VideoPlayer();
  //----- (004BECBD) --------------------------------------------------------
  virtual ~VideoPlayer()
  {
    bStopBeforeSchedule = false;
    pResetflag = 0;
    pVideoFrame.Release();
  }

  void PlayDeathMovie();
  unsigned int SmackCheckSurfaceFromat();
  void Initialize(OSWindow *window);
  void Prepare();
  void Unload();
  void FastForwardToFrame(unsigned int uFrameNum);
  void BinkDrawFrame(int a3, int a4);
  void BinkUpdatePalette() {}
  void SmackDrawFrame(int a3, int a4);
  void SmackUpdatePalette();
  _BINK *OpenBink(const char *pName);
  struct _SMACK *OpenSmack(const char *pFilename);
  void OpenHouseMovie(const char *pMovieName, unsigned int a3_1);//0x4BF28F
  bool AnyMovieLoaded();
  void OpenGlobalMovie(const char *pFilename, unsigned int bLoop, int a4);
  void _4BF5B2();
  void SelectMovieType();//0x4BF73A
  _BINKBUF *CreateBinkBuffer(unsigned int uWidth, unsigned int uHeight, char a4);
  void _inlined_in_463149();

  void MovieLoop(const char *pMovieName, int a2, int a3, int a4);


  RGBTexture pVideoFrame;
  //struct _SMACK *pSmackerMovie;
  //struct _SMACKBUF *pSmackerBuffer;
  //char *pSomeSmackerBuffer;
  int field_34;
  MovieHeader *pMightVideoHeaders;
  MovieHeader *pMagicVideoHeaders;
  int pResetflag;
  int field_44;
  unsigned int uNumMightVideoHeaders;
  unsigned int uNumMagicVideoHeaders;
  int uBinkDirectDrawSurfaceType;
  int bBufferLoaded;
  unsigned int bPlayingMovie;
  unsigned int bFirstFrame;
  unsigned int bUsingSmackerMMX;
  unsigned int bLoopPlaying;
  int field_68;
  unsigned int bStopBeforeSchedule;
  //HWND hWindow;
  OSWindow *window;
  struct _SMACKBLIT *pSmackMovieBlit;
  HANDLE hMightVid;
  HANDLE hMagicVid;
  //_BINK *pBinkMovie;
  //_BINKBUF *pBinkBuffer;
  char field_88[20];
  unsigned int uMovieFormat;
  int uMovieFormatSwapped;
  char pCurrentMovieName[64];
  char pVideoFrameTextureFilename[32];
  int field_104;
  MultimediaPlayer *pPlayer;
  Movie *pMovie;
  HANDLE hVidFile;
  int uSize;
  int uOffset;
  void UpdatePalette();
  static int readFunction(void *, uint8_t *, int);
  static int64_t seekFunction(void *, int64_t, int);
  void LoadMovie(const char *);
};
#pragma pack(pop)





extern VideoPlayer *pVideoPlayer;
extern LRESULT __stdcall wWinProc(HWND hwnd, unsigned int msg, WPARAM wparam, LPARAM lparam);