2017-08-12 2 views
1

Ich versuche, einen Video-Player zu schreiben. Ich habe einen Thread hinzugefügt, der bestimmt, wie lange ein Bild auf dem Bildschirm angezeigt werden soll. Das Dekodieren des Videos und das Aktualisieren des Fensters geschieht im Haupt-Thread; der zweite Thread holt die Pakete aus der Warteschlange, ermittelt, wie lange das Bild angezeigt werden soll, sendet das Paket an den Haupt-Thread und wartet dann die verbleibende Zeit ab. — Titel: std::future_error bei der Verwendung von std::promise

Aus irgendeinem Grund bekomme ich diesen Fehler:

terminate called after throwing an instance of 'std::future_error' 
    what(): std::future_error: No associated state 

Was den Fehler verursacht?

Mein Code:

extern "C"{ 
    //FFmpeg libraries 
    #include <libavcodec/avcodec.h> 
    #include <libavformat/avformat.h> 
    #include <libswscale/swscale.h> 

    //SDL2 libraries 
    #include <SDL2/SDL.h> 
} 
// compatibility with newer API 
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1) 
#define av_frame_alloc avcodec_alloc_frame 
#define av_frame_free avcodec_free_frame 
#endif 

//C++ libraries 
#include <atomic> 
#include <chrono> 
#include <condition_variable> 
#include <future> 
#include <iostream> 
#include <memory> 
#include <mutex> 
#include <stdio.h> 
#include <thread> 


// Singly linked FIFO of demuxed AVPackets.  The struct itself holds no lock;
// all access goes through packet_queue_put/packet_queue_get, which serialise
// on the file-scope `mutex` below.  Zero-initialised (both pointers NULL)
// via memset in main before use.
typedef struct PacketQueue { 
    AVPacketList *first_pkt, *last_pkt; 
} PacketQueue; 

// State shared between the main (demux/decode/render) thread and the
// videoTimerFunc pacing thread.
std::atomic<bool>   quitting;   // set to true by either thread to request shutdown 
std::mutex     mutex;           // protects the PacketQueue link pointers 
std::condition_variable  convar;  // signalled by packet_queue_put when a packet arrives 

// Appends a copy of *pkt to the queue and wakes any waiting consumer.
//
// The packet's payload is first made independently owned (av_dup_packet) so it
// stays valid after the caller's av_read_frame buffer is recycled; ownership of
// that payload passes to the queue node.
//
// Returns 0 on success, -1 on duplication/allocation failure.
int packet_queue_put(PacketQueue *q, AVPacket *pkt){ 
    if(av_dup_packet(pkt) < 0){ 
        return -1; 
    } 

    AVPacketList *node = (AVPacketList*) av_malloc(sizeof(AVPacketList)); 
    if(!node){ 
        // Fix: release the payload duplicated above instead of leaking it. 
        av_free_packet(pkt); 
        return -1; 
    } 
    node->pkt = *pkt; 
    node->next = NULL; 

    { 
        std::lock_guard<std::mutex> lock(mutex); 
        if(!q->last_pkt){ 
            // Queue was empty: node becomes both head and tail. 
            q->first_pkt = node; 
        }else{ 
            q->last_pkt->next = node; 
        } 
        q->last_pkt = node; 
    } 
    // Fix: notify after releasing the lock so the woken consumer does not 
    // immediately block on the still-held mutex. 
    convar.notify_all(); 
    return 0; 
} 

// Pops the oldest packet from the queue into *pkt, blocking until one is
// available.  Waits in 1 ms slices so the `quitting` flag is re-checked even
// though shutdown is never signalled through `convar`.
//
// Returns 1 when a packet was delivered, -1 when the program is quitting.
static int packet_queue_get(PacketQueue *q, AVPacket *pkt){ 
    std::unique_lock<std::mutex> lk(mutex); 

    for(;;){ 
        if(quitting){ 
            return -1; 
        } 

        AVPacketList *node = q->first_pkt; 
        if(node != NULL){ 
            // Unlink the head; clear the tail pointer when the queue empties. 
            q->first_pkt = node->next; 
            if(q->first_pkt == NULL){ 
                q->last_pkt = NULL; 
            } 
            *pkt = node->pkt; 
            av_free(node); 
            return 1; 
        } 

        // No data yet: sleep briefly; the producer notifies via convar. 
        convar.wait_for(lk, std::chrono::milliseconds(1)); 
    } 
} 

// Pacing thread: pulls packets off the queue, computes how long each frame
// should stay on screen from the packet dts (rescaled from the stream
// time_base to microseconds), hands the packet to the main thread through the
// promise, then sleeps for the frame delay.
//
// NOTE(review): std::promise is single-shot — the second prms.set_value() call
// throws std::future_error.  That is the crash reported in this question; a
// real fix needs a per-frame channel (e.g. another queue + condvar) and
// changes in main() as well, so it is only flagged here.
void videoTimerFunc(AVRational time_base, PacketQueue* videoq, std::promise<AVPacket> prms){ 
    AVPacket pkt; 
    int64_t last_pts = 0; 
    int64_t frameDelay; 
    AVRational microseconds = {1, 1000000};  // target time base: 1 µs ticks 

    while(!quitting){ 
        // packet_queue_get returns 1 (got a packet) or -1 (quitting) — never 0. 
        // Fix: the original tested `!packet_queue_get(...)`, which is always 
        // false, so on shutdown the else-branch ran with an uninitialised pkt. 
        if(packet_queue_get(videoq, &pkt) < 0){ 
            // Close programme 
            quitting = true; 
        }else { 
            // Display duration = difference between consecutive dts values. 
            frameDelay = av_rescale_q(pkt.dts, time_base, microseconds) - last_pts; 
            last_pts = av_rescale_q(pkt.dts, time_base, microseconds); 

            // Hand the packet to the main thread, then wait out the delay. 
            prms.set_value(pkt); 
            std::this_thread::sleep_for(std::chrono::microseconds(frameDelay)); 
        } 
    } 
} 

// Entry point: opens the file named on the command line, finds and opens its
// video stream with FFmpeg, creates an SDL2 window/renderer/texture, then
// decodes and displays frames paced by videoTimerThread.
//
// NOTE(review): a std::future is one-shot — after the first pktftr.get() the
// future has no associated state, so the next pktftr.wait_for() in the main
// loop throws std::future_error ("No associated state").  That is exactly the
// crash described in this question.
int main(int argc, char *argv[]){ 
AVFormatContext*    FormatCtx = nullptr; 
AVCodecContext*     CodecCtxOrig = nullptr; 
AVCodecContext*     CodecCtx = nullptr; 
AVCodec*      Codec = nullptr; 
int        videoStream; 
AVFrame*      Frame = nullptr; 
AVPacket      packet; 
struct SwsContext*    SwsCtx = nullptr; 

PacketQueue      videoq; 
std::promise<AVPacket>   pktprms; 
std::future<AVPacket>   pktftr = pktprms.get_future(); 
int        frameFinished; 
int64_t       lastPTS;  // NOTE(review): declared but never used 

SDL_Event      event;  // NOTE(review): read below before SDL_PollEvent ever fills it — indeterminate on the first iterations 
SDL_Window*      screen; 
SDL_Renderer*     renderer; 
SDL_Texture*     texture; 
std::shared_ptr<Uint8>   yPlane, uPlane, vPlane; 
int        uvPitch; 

if (argc != 2) { 
    fprintf(stderr, "Usage: %s <file>\n", argv[0]); 
    return -1; 
} 

// Register all formats and codecs 
av_register_all(); 

// Initialise SDL2 
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { 
    fprintf(stderr, "Couldn't initialise SDL - %s\n", SDL_GetError()); 
    return -1; 
} 

// Setting things up 
quitting = false; 
// Zero the queue so first_pkt/last_pkt start out NULL (PacketQueue is POD). 
memset(&videoq, 0, sizeof(PacketQueue)); 

// Open video file 
if(avformat_open_input(&FormatCtx, argv[1], NULL, NULL) != 0){ 
    fprintf(stderr, "Couldn't open file\n");   
    return -1; // Couldn't open file 
} 

// Retrieve stream information 
if(avformat_find_stream_info(FormatCtx, NULL) < 0){ 
    fprintf(stderr, "Couldn't find stream information\n"); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 

    return -1; // Couldn't find stream information 
} 

// Find the video stream 
videoStream = av_find_best_stream(FormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0); 
if(videoStream < 0){ 
    fprintf(stderr, "Couldn't find video stream\n"); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 

    return -1; // Didn't find a video stream 
} 

// Get a pointer to the codec context for the video stream 
CodecCtxOrig = FormatCtx->streams[videoStream]->codec; 

// Find the decoder for the video stream 
Codec = avcodec_find_decoder(CodecCtxOrig->codec_id); 
if(Codec == NULL){ 
    fprintf(stderr, "Unsupported codec\n"); 

    // Close the codec 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 

    return -1; // Codec not found 
} 

// Copy context so we decode with our own AVCodecContext, not the demuxer's. 
CodecCtx = avcodec_alloc_context3(Codec); 
if(avcodec_copy_context(CodecCtx, CodecCtxOrig) != 0){ 
    fprintf(stderr, "Couldn't copy codec context"); 

    // Close the codec 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 

    return -1; // Error copying codec context 
} 

// Open codec 
if(avcodec_open2(CodecCtx, Codec, NULL) < 0){ 
    fprintf(stderr, "Couldn't open codec\n"); 

    // Close the codec 
    avcodec_close(CodecCtx); 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 
    return -1; // Could not open codec 
} 

// Allocate video frame 
Frame = av_frame_alloc(); 

// Make a screen to put our video 
screen = SDL_CreateWindow("Video Player", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, CodecCtx->width, CodecCtx->height, 0); 
if(!screen){ 
    fprintf(stderr, "SDL: could not create window - exiting\n"); 
    quitting = true; 

    // Clean up SDL2 
    SDL_Quit(); 

    // Free the YUV frame 
    av_frame_free(&Frame); 

    // Close the codec 
    avcodec_close(CodecCtx); 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 

    return -1; 
} 

renderer = SDL_CreateRenderer(screen, -1, 0); 
if(!renderer){ 
    fprintf(stderr, "SDL: could not create renderer - exiting\n"); 
    quitting = true; 

    // Clean up SDL2 
    SDL_DestroyWindow(screen); 
    SDL_Quit(); 

    // Free the YUV frame 
    av_frame_free(&Frame); 

    // Close the codec 
    avcodec_close(CodecCtx); 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 
    return -1; 
} 

// Allocate a place to put our YUV image on that screen 
texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING, CodecCtx->width, CodecCtx->height); 
if(!texture){ 
    fprintf(stderr, "SDL: could not create texture - exiting\n"); 
    quitting = true; 

    // Clean up SDL2 
    SDL_DestroyRenderer(renderer); 
    SDL_DestroyWindow(screen); 
    SDL_Quit(); 

    // Free the YUV frame 
    av_frame_free(&Frame); 

    // Close the codec 
    avcodec_close(CodecCtx); 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 
    return -1; 
} 

// Initialise SWS context for software scaling (decoder pix_fmt -> YUV420P) 
SwsCtx = sws_getContext(CodecCtx->width, CodecCtx->height, CodecCtx->pix_fmt, 
      CodecCtx->width, CodecCtx->height, PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL); 
if(!SwsCtx){ 
    fprintf(stderr, "Couldn't create sws context\n"); 
    quitting = true; 

    // Clean up SDL2 
    SDL_DestroyTexture(texture); 
    SDL_DestroyRenderer(renderer); 
    SDL_DestroyWindow(screen); 
    SDL_Quit(); 

    // Free the YUV frame 
    av_frame_free(&Frame); 

    // Close the codec 
    avcodec_close(CodecCtx); 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 
    return -1; 
} 

// set up YV12 pixel array (12 bits per pixel) 
// NOTE(review): memory comes from ::operator new but shared_ptr's default 
// deleter runs `delete` on a Uint8* — mismatched allocation/deallocation; 
// should pass ::operator delete (or use new[]/default_delete<Uint8[]>). 
yPlane = std::shared_ptr<Uint8>((Uint8 *)::operator new (CodecCtx->width * CodecCtx->height, std::nothrow)); 
uPlane = std::shared_ptr<Uint8>((Uint8 *)::operator new (CodecCtx->width * CodecCtx->height/4, std::nothrow)); 
vPlane = std::shared_ptr<Uint8>((Uint8 *)::operator new (CodecCtx->width * CodecCtx->height/4, std::nothrow)); 
uvPitch = CodecCtx->width/2; 

if (!yPlane || !uPlane || !vPlane) { 
    fprintf(stderr, "Could not allocate pixel buffers - exiting\n"); 
    quitting = true; 

    // Clean up SDL2 
    SDL_DestroyTexture(texture); 
    SDL_DestroyRenderer(renderer); 
    SDL_DestroyWindow(screen); 
    SDL_Quit(); 

    // Free the YUV frame 
    av_frame_free(&Frame); 

    // Close the codec 
    avcodec_close(CodecCtx); 
    avcodec_close(CodecCtxOrig); 

    // Close the video file 
    avformat_close_input(&FormatCtx); 
    return -1; 
} 

// Start the pacing thread; pktprms is moved-from after this call. 
std::thread videoTimerThread(videoTimerFunc, FormatCtx->streams[videoStream]->time_base, &videoq, std::move(pktprms)); 

while (!quitting) { 
    // Check for more packets 
    if(av_read_frame(FormatCtx, &packet) >= 0){ 
     // Check what stream it belongs to 
     if (packet.stream_index == videoStream) { 
      packet_queue_put(&videoq, &packet); 
     }else{ 
      // Free the packet that was allocated by av_read_frame 
      av_free_packet(&packet); 
     } 
    } 

    // Check if its time to update 
    // NOTE(review): this only works for the very first frame.  pktftr.get() 
    // below invalidates the future, so this wait_for throws std::future_error 
    // on the next iteration — the reported crash.  Also note `packet` is 
    // reused both for av_read_frame above and for the future's result here, 
    // which makes payload ownership hard to follow. 
    if(pktftr.wait_for(std::chrono::milliseconds(1)) == std::future_status::ready){ 
     // Getting packet 
     packet = pktftr.get(); 

     // Decode video frame 
     avcodec_decode_video2(CodecCtx, Frame, &frameFinished, &packet); 

     // Did we get a video frame? 
     if (frameFinished) { 
      // Point an AVPicture at our three YV12 planes so sws_scale writes 
      // straight into the buffers SDL uploads from. 
      AVPicture pict; 
      pict.data[0] = yPlane.get(); 
      pict.data[1] = uPlane.get(); 
      pict.data[2] = vPlane.get(); 
      pict.linesize[0] = CodecCtx->width; 
      pict.linesize[1] = uvPitch; 
      pict.linesize[2] = uvPitch; 

      // Convert the image into YUV format that SDL uses 
      sws_scale(SwsCtx, (uint8_t const * const *) Frame->data, Frame->linesize, 0, CodecCtx->height, pict.data, pict.linesize); 

      SDL_UpdateYUVTexture(texture, NULL, yPlane.get(), CodecCtx->width, uPlane.get(), uvPitch, vPlane.get(), uvPitch); 

      SDL_RenderClear(renderer); 
      SDL_RenderCopy(renderer, texture, NULL, NULL); 
      SDL_RenderPresent(renderer); 
     } 
     // Free the packet that was allocated by av_read_frame 
     av_free_packet(&packet); 
    } 

    SDL_PollEvent(&event); 
    switch (event.type) { 
     case SDL_QUIT: 
      quitting = true; 
      break; 
     default: 
      break; 
    } 
} 

videoTimerThread.join(); 

//SDL2 clean up 
SDL_DestroyTexture(texture); 
SDL_DestroyRenderer(renderer); 
SDL_DestroyWindow(screen); 
SDL_Quit(); 

// Free the YUV frame 
av_frame_free(&Frame); 

// Free Sws 
sws_freeContext(SwsCtx); 

// Close the codec 
avcodec_close(CodecCtx); 
avcodec_close(CodecCtxOrig); 

// Close the video file 
avformat_close_input(&FormatCtx); 

return 0; 
} 
+0

`std::future` ist One-Shot. Sobald Sie `get()` aufgerufen haben, befindet es sich im ungültigen Zustand, und alle weiteren `wait*()`- oder `get()`-Aufrufe zeigen undefiniertes Verhalten. –

+0

Also ist das Problem mit den 'ptkftr.get()' und 'ptkftr.wait_for()' zu tun? – Gathros

+0

Das Problem liegt in Ihrem Versuch, `std::promise` und `std::future` in einer Schleife zu verwenden. So funktionieren sie nicht; sie können genau einen Wert genau einmal übermitteln. –

Antwort

0

Man kann `std::promise` und `std::future` nicht in einer Schleife verwenden. Danke an Igor Tandetnik für den Hinweis im Kommentar.

Verwandte Themen