motion_watch.c

    /*
    * Copyright (c) 2010 Nicolas George
    * Copyright (c) 2011 Stefano Sabatini
    *
    * Permission is hereby granted, free of charge, to any person obtaining a copy
    * of this software and associated documentation files (the "Software"), to deal
    * in the Software without restriction, including without limitation the rights
    * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    * copies of the Software, and to permit persons to whom the Software is
    * furnished to do so, subject to the following conditions:
    *
    * The above copyright notice and this permission notice shall be included in
    * all copies or substantial portions of the Software.
    *
    * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
    * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    * THE SOFTWARE.
    */

    /**
    * @file
    * Decode incoming video segments and accumulate motion-vector statistics
    * (adapted from the FFmpeg API example doc/examples/filtering_video.c)
    */
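    /*
     * One possible way to build and run this, assuming FFmpeg headers and
     * libraries from the pre-2014 API era (avcodec_alloc_frame, AVStream->codec,
     * CODEC_ID_H264, ...) are installed and visible to pkg-config:
     *
     *   gcc -std=gnu99 motion_watch.c -o motion_watch \
     *       $(pkg-config --cflags --libs libavformat libavcodec libavfilter libavutil) -lm
     *
     *   ./motion_watch /path/to/segment/dir/
     *
     * The watched directory argument should end with '/' because file names
     * reported by inotify are appended to it directly.
     */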

    #define _XOPEN_SOURCE 600 /* for usleep */
    #include <unistd.h>
    #include <time.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <stdbool.h>
    #include <math.h>


    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libavfilter/avcodec.h>
    #include <libavfilter/buffersink.h>
    #include <libavfilter/buffersrc.h>

    #include <errno.h>
    #include <sys/types.h>
    #include <sys/inotify.h>
    #include <limits.h>
    #include <string.h>

    #define MAX_EVENTS 1024 /*Max. number of events to process at one go*/
    #define LEN_NAME 16 /*Assuming that the length of the filename won't exceed 16 bytes*/
    #define EVENT_SIZE ( sizeof (struct inotify_event) ) /*size of one event*/
    #define BUF_LEN ( MAX_EVENTS * ( EVENT_SIZE + LEN_NAME )) /*buffer to store the data of events*/

    #define IS_INTERLACED(a) ((a)&MB_TYPE_INTERLACED)
    #define IS_16X16(a) ((a)&MB_TYPE_16x16)
    #define IS_16X8(a) ((a)&MB_TYPE_16x8)
    #define IS_8X16(a) ((a)&MB_TYPE_8x16)
    #define IS_8X8(a) ((a)&MB_TYPE_8x8)
    #define USES_LIST(a, list) ((a) & ((MB_TYPE_P0L0|MB_TYPE_P1L0)<<(2*(list))))

    //FFMpeg interface change
    #define FF_I_TYPE AV_PICTURE_TYPE_I ///< Intra
    #define FF_P_TYPE AV_PICTURE_TYPE_P ///< Predicted
    #define FF_B_TYPE AV_PICTURE_TYPE_B ///< Bi-dir predicted
    #define FF_S_TYPE AV_PICTURE_TYPE_S ///< S(GMC)-VOP MPEG4
    #define FF_SI_TYPE AV_PICTURE_TYPE_SI ///< Switching Intra
    #define FF_SP_TYPE AV_PICTURE_TYPE_SP ///< Switching Predicted
    #define FF_BI_TYPE AV_PICTURE_TYPE_BI
    #define CODEC_TYPE_VIDEO AVMEDIA_TYPE_VIDEO
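    /* The defines above bridge two FFmpeg API generations: FF_*_TYPE and
     * CODEC_TYPE_VIDEO are the old names for the AV_PICTURE_TYPE_* and
     * AVMEDIA_TYPE_VIDEO enums, while the IS_* / USES_LIST macros decode the
     * MB_TYPE_* flags that libavcodec of this era exposes per macroblock. */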

    int count = 0;
    double sum = 0;
    double h;
    double w;
    double cutoff;

    static AVFormatContext *fmt_ctx;
    static AVCodecContext *dec_ctx;
    AVFilterContext *buffersink_ctx;
    AVFilterContext *buffersrc_ctx;
    static int video_stream_index = -1;
    static int64_t last_pts = AV_NOPTS_VALUE;
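    /* count/sum accumulate the number and total magnitude of motion vectors for
     * the segment currently being processed. h and w appear only in the
     * commented-out normalisation in print_vector and cutoff is unused;
     * buffersink_ctx, buffersrc_ctx and last_pts are unused leftovers from the
     * filtering_video.c example this file is based on. */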

    static int open_input_file(const char *filename)
    {
    int ret;
    AVCodec *dec;

    if ((ret = avformat_open_input(&fmt_ctx, filename, NULL, NULL)) < 0) {
    av_log(NULL, AV_LOG_ERROR, "Cannot open input file\n");
    return ret;
    }

    if ((ret = avformat_find_stream_info(fmt_ctx, NULL)) < 0) {
    av_log(NULL, AV_LOG_ERROR, "Cannot find stream information\n");
    return ret;
    }

    /* select the video stream */
    ret = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0);
    if (ret < 0) {
    av_log(NULL, AV_LOG_ERROR, "Cannot find a video stream in the input file\n");
    return ret;
    }
    video_stream_index = ret;
    dec_ctx = fmt_ctx->streams[video_stream_index]->codec;

    /* init the video decoder */
    if ((ret = avcodec_open2(dec_ctx, dec, NULL)) < 0) {
    av_log(NULL, AV_LOG_ERROR, "Cannot open video decoder\n");
    return ret;
    }

    return 0;
    }


    /* Accumulate the magnitude of one motion vector into the global sum/count,
    * skipping the NO_MV sentinel (10000) used for macroblocks without a vector. */
    void print_vector(int x, int y, int dx, int dy)
    {
    if (dx != 10000 && dy != 10000){
    //sum = sum + sqrt((double)dx*(double)dx/w/w + (double)dy*(double)dy/h/h);
    sum = sum + sqrt(dx*dx+dy*dy);
    count++;
    }
    //if ((dx > 5 || dy > 5) && (dx != 10000 && dy != 10000)) {
    //printf("%d %d ; %d %d\n", x, y, dx, dy);
    //}
    }

    /* Walk the motion vector(s) of each macroblock in this frame and pass them
    * to print_vector. If a macroblock has no motion vector, the magic number
    * NO_MV is passed instead. */
    void printMVMatrix(int index, AVFrame *pict, AVCodecContext *ctx)
    {
    const int mb_width = (ctx->width + 15) / 16;
    const int mb_height = (ctx->height + 15) / 16;
    const int mb_stride = mb_width + 1;
    const int mv_sample_log2 = 4 - pict->motion_subsample_log2;
    const int mv_stride = (mb_width << mv_sample_log2) + (ctx->codec_id == CODEC_ID_H264 ? 0 : 1);
    const int quarter_sample = (ctx->flags & CODEC_FLAG_QPEL) != 0;
    const int shift = 1 + quarter_sample;
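    /* mb_width/mb_height round the frame up to whole 16x16 macroblocks;
     * mv_sample_log2 is the log2 of motion-vector samples stored per macroblock
     * in each dimension, and the >>shift below converts the half- (or, with
     * CODEC_FLAG_QPEL, quarter-) pel values in motion_val[] to whole pixels. */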


    //printf("frame %d, %d x %d\n", index, mb_height, mb_width);

    for (int mb_y = 0; mb_y < mb_height; mb_y++) {
    for (int mb_x = 0; mb_x < mb_width; mb_x++) {
    const int mb_index = mb_x + mb_y * mb_stride;
    if (pict->motion_val) {
    for (int type = 0; type < 3; type++) {
    int direction = 0;
    switch (type) {
    case 0:
    if (pict->pict_type != FF_P_TYPE)
    continue;
    direction = 0;
    break;
    case 1:
    if (pict->pict_type != FF_B_TYPE)
    continue;
    direction = 0;
    break;
    case 2:
    if (pict->pict_type != FF_B_TYPE)
    continue;
    direction = 1;
    break;
    }
    if (!USES_LIST(pict->mb_type[mb_index], direction)) {
    #define NO_MV 10000
    if (IS_8X8(pict->mb_type[mb_index])) {
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    } else if (IS_16X8(pict->mb_type[mb_index])) {
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    } else if (IS_8X16(pict->mb_type[mb_index])) {
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    } else {
    print_vector(mb_x, mb_y, NO_MV, NO_MV);
    }
    #undef NO_MV
    continue;
    }

    if (IS_8X8(pict->mb_type[mb_index])) {
    for (int i = 0; i < 4; i++) {
    int xy = (mb_x*2 + (i&1) + (mb_y*2 + (i>>1))*mv_stride) << (mv_sample_log2-1);
    int dx = (pict->motion_val[direction][xy][0]>>shift);
    int dy = (pict->motion_val[direction][xy][1]>>shift);
    print_vector(mb_x, mb_y, dx, dy);
    }
    } else if (IS_16X8(pict->mb_type[mb_index])) {
    for (int i = 0; i < 2; i++) {
    int xy = (mb_x*2 + (mb_y*2 + i)*mv_stride) << (mv_sample_log2-1);
    int dx = (pict->motion_val[direction][xy][0]>>shift);
    int dy = (pict->motion_val[direction][xy][1]>>shift);

    if (IS_INTERLACED(pict->mb_type[mb_index]))
    dy *= 2;

    print_vector(mb_x, mb_y, dx, dy);
    }
    } else if (IS_8X16(pict->mb_type[mb_index])) {
    for (int i = 0; i < 2; i++) {
    int xy = (mb_x*2 + i + mb_y*2*mv_stride) << (mv_sample_log2-1);
    int dx = (pict->motion_val[direction][xy][0]>>shift);
    int dy = (pict->motion_val[direction][xy][1]>>shift);

    if (IS_INTERLACED(pict->mb_type[mb_index]))
    dy *= 2;

    print_vector(mb_x, mb_y, dx, dy);
    }
    } else {
    int xy = (mb_x + mb_y*mv_stride) << mv_sample_log2;
    int dx = (pict->motion_val[direction][xy][0]>>shift);
    int dy = (pict->motion_val[direction][xy][1]>>shift);
    print_vector(mb_x, mb_y, dx, dy);
    }
    }
    }
    //printf("--\n");
    }
    //printf("====\n");
    }
    }



    /* Decode one finished video segment, accumulate motion-vector statistics
    * via printMVMatrix/print_vector, and print a per-segment summary. */
    void motion_watch( char *input_f )
    {
    int ret;
    count = 0;
    sum = 0;
    AVPacket packet;
    AVFrame *frame = avcodec_alloc_frame();
    int got_frame;
    time_t start_t, end_t;
    double diff_t;
    time(&start_t); /* start timing here so start_t is set even if we bail out via goto end */

    if (!frame) {
    perror("Could not allocate frame");
    return;
    }

    avcodec_register_all();
    av_register_all();

    if ((ret = open_input_file(input_f)) < 0)
    goto end;

    dec_ctx->skip_loop_filter = AVDISCARD_ALL; //1m12s -> 54s
    //dec_ctx->skip_idct = AVDISCARD_ALL; //did nothing - think h.264 doesn't touch these
    //dec_ctx->idct_algo = FF_IDCT_SIMPLEARMV6; // did nothing
    //dec_ctx->idct_algo = FF_IDCT_SIMPLENEON; // did nothing
    //dec_ctx->idct_algo = FF_IDCT_INT; // did nothing
    //dec_ctx->lowres=2; //spewed errors and crashed
    //dec_ctx->bits_per_coded_sample=2; //did nothing

    //dec_ctx->draw_horiz_band =(int)1; // cored
    //dec_ctx->skip_frame = AVDISCARD_ALL; // did nothing - 55->47 and spewed loads of errors
    //dec_ctx->skip_top=(int)128; // did nothing
    //dec_ctx->thread_count=2; // didn't do anything
    //dec_ctx->flags2 |= CODEC_FLAG2_FAST; // didn't do anything
    dec_ctx->flags |= CODEC_FLAG_GRAY; // 55s->46s
    //dec_ctx->flags |= CODEC_FLAG_LOOP_FILTER; // didn't do anything
    //dec_ctx->flags2 |= CODEC_FLAG2_IGNORE_CROP; // didn't do anything
    /* read all packets */
    int f = 1;
    while (1) {
    AVFilterBufferRef *picref;
    if ((ret = av_read_frame(fmt_ctx, &packet)) < 0)
    break;

    //if (packet.stream_index == video_stream_index) {
    if (packet.stream_index == video_stream_index && ( f % 6 == 0 || (f-1) % 6==0 || f<10 )) { // process only every second packet: 46s->25s (every 3rd: 17s)
    avcodec_get_frame_defaults(frame);
    got_frame = 0;
    ret = avcodec_decode_video2(dec_ctx, frame, &got_frame, &packet);
    if (ret < 0) {
    av_log(NULL, AV_LOG_ERROR, "Error decoding video\n");
    break;
    }

    if (got_frame) {
    if(frame->pict_type != FF_I_TYPE) {
    //printf("Got frame \n");
    printMVMatrix( f, frame, dec_ctx );
    }
    }
    }
    ++f;
    av_free_packet(&packet);
    }
    end:
    time(&end_t);
    diff_t = difftime(end_t, start_t);
    printf("Execution time = %f\n", diff_t);
    printf("[%ld] sum mv: %f, total # mv: %d\n", (long)start_t, sum, count);

    if (dec_ctx)
    avcodec_close(dec_ctx);
    if (fmt_ctx)
    avformat_close_input(&fmt_ctx);
    if (frame)
    av_freep(&frame);

    }
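
    /* main(): watch the directory given as argv[1] with inotify and run
     * motion_watch() on every .ts segment that finishes being written there. */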


    int main( int argc, char **argv )
    {
    int length, i = 0, wd;
    int fd;
    char buffer[BUF_LEN];

    /* Initialize Inotify*/
    fd = inotify_init();
    if ( fd < 0 ) {
    perror( "Couldn't initialize inotify" );
    exit(EXIT_FAILURE);
    }

    if ( argc < 2 ) {
    fprintf(stderr, "Usage: %s <directory-to-watch>/\n", argv[0]);
    exit(EXIT_FAILURE);
    }

    /* add watch to starting directory */
    wd = inotify_add_watch(fd, argv[1], IN_CLOSE_WRITE );

    if (wd == -1)
    {
    printf("Couldn't add watch to %s\n", argv[1]);
    exit(EXIT_FAILURE);
    }
    else
    {
    printf("Watching: %s\n", argv[1]);
    }

    /* do it forever*/
    while(1)
    {
    i = 0;
    length = read( fd, buffer, BUF_LEN );

    if ( length < 0 ) {
    perror( "read" );
    }

    while ( i < length ) {
    struct inotify_event *event = ( struct inotify_event * ) &buffer[ i ];
    if ( event->len ) {
    if ( event->mask & IN_CLOSE_WRITE) {
    if ( ! (event->mask & IN_ISDIR)) {
    char *dot = strrchr(event->name, '.');
    if (dot && !strcmp(dot, ".ts")) {
    char fname[200];
    /* argv[1] is expected to end with '/'; snprintf avoids overflowing fname */
    snprintf(fname, sizeof(fname), "%s%s", argv[1], event->name);
    printf( "New video segment %s\n", fname );
    motion_watch(fname);
    }
    }
    }
    }

    /* advance even for events with an empty name, otherwise this loop never ends */
    i += EVENT_SIZE + event->len;
    }
    }

    /* Clean up*/
    inotify_rm_watch( fd, wd );
    close( fd );

    return 0;
    }