/*
 * AiCPlayer — interface of the aic VM: rendering, sensors, video records.
 * grabber.c — records videos or snapshots of the X window, driven by
 * socket or AMQP messages.
 */
5 #include <X11/X.h> // for Drawable, ZPixmap
6 #include <X11/Xlib.h> // for XImage, XGetImage, AllPlanes, XCre..
7 #include <X11/Xutil.h> // for XDestroyImage, XGetPixel
8 #include <errno.h> // for EBUSY
9 #include <libavcodec/avcodec.h> // for AVCodecContext, AVPacket, AVCodec
10 #include <libswresample/swresample.h> // swr_free
11 #include <libavformat/avformat.h> // for AVFormatContext, AVOutputFormat
12 #include <libavformat/avio.h> // for avio_closep, avio_open, AVIO_FLAG_..
13 #include <libavutil/avutil.h> // for AVMediaType::AVMEDIA_TYPE_VIDEO
14 #include <libavutil/dict.h> // for AVDictionary, av_dict_copy, av_dic..
15 #include <libavutil/error.h> // for av_err2str
16 #include <libavutil/frame.h> // for AVFrame, av_frame_alloc, av_frame_..
17 #include <libavutil/pixfmt.h> // for AVPixelFormat::AV_PIX_FMT_YUV420P
18 #include <libavutil/rational.h> // for AVRational
19 #include <pthread.h> // for pthread_join, pthread_t, pthread_m..
20 #include <stdint.h> // for uint8_t
21 #include <stdio.h> // for NULL, fprintf, stderr, fclose, fopen
22 #include <stdlib.h> // for exit, free, malloc
23 #include <string.h> // for memset
24 #include <sys/select.h> // for FD_ISSET, FD_SET, FD_ZERO, fd_set
25 #include <sys/stat.h> // for stat
26 #include <sys/time.h> // for timeval, gettimeofday
27 #include <time.h> // for timespec, time_t
28 #include <unistd.h> // for sleep, usleep
29 
30 #include "amqp_listen.h"
31 #include "buffer_sizes.h"
32 #include "logger.h"
33 #include "recording.pb-c.h"
34 #include "sensors.h"
35 #include "socket.h"
36 
37 #include "grabber.h"
38 
39 #define LOG_TAG "grabber"
40 
41 #define READ_BUFFER_SIZE 1024
42 
46 extern int g_width;
47 
51 extern int g_height;
52 
56 extern void* g_window_id;
57 
61 static Display* s_display;
62 
66 static char* s_path_results;
67 
/* Set the static X display pointer used by all grab/record helpers below. */
void grabber_set_display(Display* display)
{
    s_display = display;
}
72 
/* Set the static base path under which screenshots and movies are written.
 * NOTE(review): the pointer is stored as-is, not copied — the caller must
 * keep the string alive for the lifetime of the grabber. */
void grabber_set_path_results(char* results)
{
    s_path_results = results;
}
77 
/*
 * \brief Poll whether video recording should stop.
 *
 * The controller thread holds \a mtx for as long as recording must continue;
 * once it releases the lock, this probe acquires it and reports "stop".
 *
 * \param mtx mutex guarding the "keep recording" state
 * \return 1 when recording must stop, 0 while it should continue
 */
int needQuit(pthread_mutex_t* mtx)
{
    int rc = pthread_mutex_trylock(mtx);

    if (rc == 0)
    {
        /* We acquired the lock: the owner released it — stop was requested. */
        LOGM("we got the lock, unlock and return 1 (true). -> NeedQuit Stop record");
        pthread_mutex_unlock(mtx);
        return 1;
    }
    if (rc == EBUSY)
    {
        /* Still locked by the controller: keep recording. */
        return 0;
    }
    /* Any other trylock failure: treat it as a stop request. */
    return 1;
}
95 
96 static void log_packet(const AVFormatContext* fmt_ctx, const AVPacket* pkt)
97 {
98  (void) fmt_ctx;
99  (void) pkt;
100  /*
101  AVRational* time_base = &fmt_ctx->streams[pkt->stream_index]->time_base;
102 
103  printf("pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",
104  av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, time_base),
105  av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, time_base),
106  av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, time_base),
107  pkt->stream_index);*/
108 }
109 
/*
 * Rescale \a pkt's timestamps from the codec time base to the stream time
 * base, tag it with the stream index, and hand it to the muxer.
 *
 * \return the av_interleaved_write_frame() result (0 on success, <0 on error)
 */
static int write_frame(AVFormatContext* fmt_ctx, const AVRational* time_base, AVStream* st,
    AVPacket* pkt)
{
    /* rescale output packet timestamp values from codec to stream timebase */
    av_packet_rescale_ts(pkt, *time_base, st->time_base);
    pkt->stream_index = st->index;

    /* Write the compressed frame to the media file. */
    log_packet(fmt_ctx, pkt);
    return av_interleaved_write_frame(fmt_ctx, pkt);
}
121 
/*
 * Add an output stream of the given codec id to the muxer context \a oc and
 * configure its codec context (bitrate, size, time base, GOP, pixel format).
 *
 * \param ost      output-stream wrapper to fill (stream pointer is set here)
 * \param oc       muxer context the new stream is attached to
 * \param codec    out: the encoder found for \a codec_id
 * \param codec_id encoder to look up (typically oc->oformat->video_codec)
 *
 * Exits the process when the encoder cannot be found or the stream cannot be
 * allocated. NOTE(review): uses the legacy st->codec context and the
 * CODEC_FLAG_GLOBAL_HEADER name, both deprecated in newer FFmpeg releases —
 * matches the FFmpeg version this tree builds against; confirm before
 * upgrading FFmpeg.
 */
static void add_stream(OutputStream* ost, AVFormatContext* oc, AVCodec** codec,
    enum AVCodecID codec_id)
{
    AVCodecContext* c;

    /* find the encoder */
    *codec = avcodec_find_encoder(codec_id);
    if (!(*codec))
    {
        fprintf(stderr, "Could not find encoder for '%s'\n", avcodec_get_name(codec_id));
        exit(1);
    }

    ost->st = avformat_new_stream(oc, *codec);
    if (!ost->st)
    {
        fprintf(stderr, "Could not allocate stream\n");
        exit(1);
    }
    ost->st->id = oc->nb_streams - 1;
    c = ost->st->codec;

    switch ((*codec)->type)
    {
    case AVMEDIA_TYPE_VIDEO:
        c->codec_id = codec_id;

        c->bit_rate = 400000;
        /* Resolution must be a multiple of two. */
        c->width = g_width;
        c->height = g_height;
        /* timebase: This is the fundamental unit of time (in seconds) in terms
         * of which frame timestamps are represented. For fixed-fps content,
         * timebase should be 1/framerate and timestamp increments should be
         * identical to 1. */
        ost->st->time_base = (AVRational){1, STREAM_FRAME_RATE};
        c->time_base = ost->st->time_base;

        c->gop_size = 12; /* emit one intra frame every twelve frames at most */
        c->pix_fmt = STREAM_PIX_FMT;
        if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        {
            /* just for testing, we also add B frames */
            c->max_b_frames = 2;
        }
        if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO)
        {
            /* Needed to avoid using macroblocks in which some coeffs overflow.
             * This does not happen with normal video, it just happens here as
             * the motion of the chroma plane does not match the luma plane. */
            c->mb_decision = 2;
        }
        break;

    default:
        /* only video streams are produced by this recorder */
        break;
    }

    /* Some formats want stream headers to be separate. */
    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
185 
186 /**************************************************************/
187 /* video output */
188 
189 static AVFrame* alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
190 {
191  AVFrame* picture;
192  int ret;
193 
194  picture = av_frame_alloc();
195  if (!picture)
196  return NULL;
197 
198  picture->format = pix_fmt;
199  picture->width = width;
200  picture->height = height;
201 
202  /* allocate the buffers for the frame data */
203  ret = av_frame_get_buffer(picture, 32);
204  if (ret < 0)
205  {
206  fprintf(stderr, "Could not allocate frame data.\n");
207  exit(1);
208  }
209 
210  return picture;
211 }
212 
/*
 * Open the video encoder for \a ost and allocate its reusable frame(s).
 *
 * \param codec   encoder previously found by add_stream()
 * \param ost     output stream whose codec context is opened
 * \param opt_arg encoder options; copied, the caller keeps ownership
 *
 * Exits the process on any failure. When the codec's pixel format is not
 * YUV420P, a temporary YUV420P frame is also allocated: the X11 grab is
 * always produced in YUV420P and converted afterwards (see get_video_frame).
 */
static void open_video(AVCodec* codec, OutputStream* ost, AVDictionary* opt_arg)
{
    int ret;
    AVCodecContext* c = ost->st->codec;
    AVDictionary* opt = NULL;

    /* work on a copy so consumed entries don't disturb the caller's dict */
    av_dict_copy(&opt, opt_arg, 0);

    /* open the codec */
    ret = avcodec_open2(c, codec, &opt);
    av_dict_free(&opt);
    if (ret < 0)
    {
        fprintf(stderr, "Could not open video codec: %s\n", av_err2str(ret));
        exit(1);
    }

    /* allocate and init a re-usable frame */
    ost->frame = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!ost->frame)
    {
        fprintf(stderr, "Could not allocate video frame\n");
        exit(1);
    }

    /* If the output format is not YUV420P, then a temporary YUV420P
     * picture is needed too. It is then converted to the required
     * output format. */
    ost->tmp_frame = NULL;
    if (c->pix_fmt != AV_PIX_FMT_YUV420P)
    {
        ost->tmp_frame = alloc_picture(AV_PIX_FMT_YUV420P, c->width, c->height);
        if (!ost->tmp_frame)
        {
            fprintf(stderr, "Could not allocate temporary picture\n");
            exit(1);
        }
    }
}
252 
253 /* Prepare a dummy image. */
254 static void fill_yuv_image(AVFrame* pict, int width, int height)
255 {
256  int x, y, ret;
257 
258  /* when we pass a frame to the encoder, it may keep a reference to it
259  * internally;
260  * make sure we do not overwrite it here
261  */
262  ret = av_frame_make_writable(pict);
263  if (ret < 0)
264  exit(1);
265 
267  XImage* image =
268  XGetImage(s_display, (Drawable) g_window_id, 0, 0, width, height, AllPlanes, ZPixmap);
269 
270  // unsigned char *array = new unsigned char[width * height * 3];
271  unsigned long red_mask = image->red_mask;
272  unsigned long green_mask = image->green_mask;
273  unsigned long blue_mask = image->blue_mask;
274 
275  unsigned long pixel;
276 
277  unsigned char blue; //= pixel & blue_mask;
278  unsigned char green; //= (pixel & green_mask) >> 8;
279  unsigned char red; //= (pixel & red_mask) >> 16;
280 
281  // Y
282  for (y = 0; y < height; y++)
283  {
284  for (x = 0; x < width; x++)
285  {
286  pixel = XGetPixel(image, x, y);
287  blue = pixel & blue_mask;
288  green = (pixel & green_mask) >> 8;
289  red = (pixel & red_mask) >> 16;
290  pict->data[0][y * pict->linesize[0] + x] =
291  RGB2Y(red, green, blue); // *(img+ x + y + i * 3);
292  pict->data[1][(y / 2) * pict->linesize[1] + (x / 2)] = RGB2U(red, green, blue);
293  pict->data[2][(y / 2) * pict->linesize[2] + (x / 2)] = RGB2V(red, green, blue);
294  }
295  }
296 
297  XDestroyImage(image);
298 }
299 
300 static AVFrame* get_video_frame(OutputStream* ost, void* arg)
301 {
302  AVCodecContext* c = ost->st->codec;
303 
304  /* check if we want to generate more frames
305  if (av_compare_ts(ost->next_pts, ost->st->codec->time_base,
306  STREAM_DURATION, (AVRational){ 1, 1 }) >= 0)
307  return NULL;*/
308  pthread_mutex_t* mx = arg;
309  while (needQuit(mx))
310  {
311  return NULL;
312  }
313 
314  if (c->pix_fmt != AV_PIX_FMT_YUV420P)
315  {
316  /* as we only generate a YUV420P picture, we must convert it
317  * to the codec pixel format if needed */
318  if (!ost->sws_ctx)
319  {
320  ost->sws_ctx = sws_getContext(c->width, c->height, AV_PIX_FMT_YUV420P, c->width,
321  c->height, c->pix_fmt, SCALE_FLAGS, NULL, NULL, NULL);
322  if (!ost->sws_ctx)
323  {
324  fprintf(stderr, "Could not initialize the conversion context\n");
325  exit(1);
326  }
327  }
328  fill_yuv_image(ost->tmp_frame, c->width, c->height);
329  sws_scale(ost->sws_ctx, (const uint8_t* const*) ost->tmp_frame->data,
330  ost->tmp_frame->linesize, 0, c->height, ost->frame->data, ost->frame->linesize);
331  }
332  else
333  {
334  fill_yuv_image(ost->frame, c->width, c->height);
335  }
336 
337  ost->frame->pts = ost->next_pts++;
338 
339  return ost->frame;
340 }
341 
/*
 * encode one video frame and send it to the muxer
 * return 1 when encoding is finished, 0 otherwise
 *
 * NOTE(review): uses the legacy AVFMT_RAWPICTURE path and
 * avcodec_encode_video2(), both removed in newer FFmpeg — matches the FFmpeg
 * version this tree builds against. av_interleaved_write_frame() takes
 * ownership of the packet, so no explicit unref is needed here.
 */
static int write_video_frame(AVFormatContext* oc, OutputStream* ost, void* arg)
{
    int ret;
    AVCodecContext* c;
    AVFrame* frame;
    int got_packet = 0;

    c = ost->st->codec;

    /* NULL frame means "stop requested" (see get_video_frame) */
    frame = get_video_frame(ost, arg);

    if (oc->oformat->flags & AVFMT_RAWPICTURE)
    {
        /* a hack to avoid data copy with some raw video muxers */
        AVPacket pkt;
        av_init_packet(&pkt);

        if (!frame)
            return 1;

        pkt.flags |= AV_PKT_FLAG_KEY;
        pkt.stream_index = ost->st->index;
        pkt.data = (uint8_t*) frame;
        pkt.size = sizeof(AVPicture);

        pkt.pts = pkt.dts = frame->pts;
        av_packet_rescale_ts(&pkt, c->time_base, ost->st->time_base);

        ret = av_interleaved_write_frame(oc, &pkt);
    }
    else
    {
        AVPacket pkt = {0};
        av_init_packet(&pkt);

        /* encode the image; frame == NULL flushes delayed packets */
        ret = avcodec_encode_video2(c, &pkt, frame, &got_packet);
        if (ret < 0)
        {
            fprintf(stderr, "Error encoding video frame: %s\n", av_err2str(ret));
            exit(1);
        }

        if (got_packet)
        {
            ret = write_frame(oc, &c->time_base, ost->st, &pkt);
        }
        else
        {
            ret = 0;
        }
    }

    if (ret < 0)
    {
        fprintf(stderr, "Error while writing video frame: %s\n", av_err2str(ret));
        exit(1);
    }

    /* finished only when there is no input frame left AND the encoder
     * produced no more delayed packets */
    return (frame || got_packet) ? 0 : 1;
}
407 
/* Release everything owned by an OutputStream: codec context, frames, and
 * the software scale/resample contexts (all free functions accept NULL). */
static void close_stream(OutputStream* ost)
{
    avcodec_close(ost->st->codec);
    av_frame_free(&ost->frame);
    av_frame_free(&ost->tmp_frame);
    sws_freeContext(ost->sws_ctx);
    swr_free(&ost->swr_ctx);
}
416 
417 /**************************************************************/
418 /* media file output */
419 
420 int ffmpeg_grabber(void* arg)
421 {
422  OutputStream video_st = {0};
423  const char* filename;
424  AVOutputFormat* fmt;
425  AVFormatContext* oc;
426  AVCodec* video_codec;
427  int ret;
428  int have_video = 0;
429  int encode_video = 0;
430  AVDictionary* opt = NULL;
431 
432  struct thread_args* args = (struct thread_args*) arg;
433 
434  filename = args->record_filename;
435  av_dict_set(&opt, "author", "aic", 0);
436 
437  /* allocate the output media context */
438  avformat_alloc_output_context2(&oc, NULL, NULL, filename);
439  if (!oc)
440  {
441  LOGM("Could not deduce output format from file extension: using MPEG. %s",
442  args->record_filename);
443  avformat_alloc_output_context2(&oc, NULL, "mpeg", filename);
444  }
445  if (!oc)
446  return 1;
447 
448  fmt = oc->oformat;
449 
450  /* Add the audio and video streams using the default format codecs
451  * and initialize the codecs. */
452  if (fmt->video_codec != AV_CODEC_ID_NONE)
453  {
454  add_stream(&video_st, oc, &video_codec, fmt->video_codec);
455  have_video = 1;
456  encode_video = 1;
457  }
458 
459  /* Now that all the parameters are set, we can open
460  * video codecs and allocate the necessary encode buffers. */
461  if (have_video)
462  open_video(video_codec, &video_st, opt);
463 
464  av_dump_format(oc, 0, filename, 1);
465 
466  /* open the output file, if needed */
467  if (!(fmt->flags & AVFMT_NOFILE))
468  {
469  ret = avio_open(&oc->pb, filename, AVIO_FLAG_WRITE);
470  if (ret < 0)
471  {
472  fprintf(stderr, "Could not open '%s': %s\n", filename, av_err2str(ret));
473  return 1;
474  }
475  }
476 
477  /* Write the stream header, if any. */
478  ret = avformat_write_header(oc, &opt);
479  if (ret < 0)
480  {
481  fprintf(stderr, "Error occurred when opening output file: %s\n", av_err2str(ret));
482  return 1;
483  }
484 
485  while (encode_video)
486  {
487  encode_video = !write_video_frame(oc, &video_st, (void*) &args->mtx);
488  }
489 
490  /* Write the trailer, if any. The trailer must be written before you
491  * close the CodecContexts open when you wrote the header; otherwise
492  * av_write_trailer() may try to use memory that was freed on
493  * av_codec_close(). */
494  av_write_trailer(oc);
495 
496  /* Close each codec. */
497  if (have_video)
498  close_stream(&video_st);
499 
500  if (!(fmt->flags & AVFMT_NOFILE))
501  /* Close the output file. */
502  avio_closep(&oc->pb);
503 
504  /* free the stream */
505  avformat_free_context(oc);
506 
507  return 0;
508 }
509 
510 unsigned char* xgrabber()
511 {
512  char T[30];
513  char usec[4];
514  time_t curtime;
515  struct timeval tv;
516  struct tm cur_localtime;
517  gettimeofday(&tv, NULL);
518  curtime = tv.tv_sec;
519  localtime_r(&curtime, &cur_localtime);
520  strftime(T, 30, "%Y/%d/%m %H:%M:%S", &cur_localtime);
521 
522  snprintf(usec, 4, "%ld", tv.tv_usec);
523 
524  unsigned char* img = NULL;
525  int w, h;
526  w = g_width;
527  h = g_height;
528 
529  if (img)
530  free(img);
531  img = (unsigned char*) malloc(3 * w * h);
532  if (!img)
533  LOGE("xgrabber(): out of memory");
534  memset(img, 0, sizeof(*img));
535 
536  char string1[BUF_SIZE];
537  snprintf(string1, sizeof(string1), "%s%s", T, usec);
538 
539  XMapWindow(s_display, (Drawable) g_window_id);
540  GC gc = XCreateGC(s_display, (Drawable) g_window_id, 0, 0);
541 
542  XFillRectangle(s_display, (Drawable) g_window_id, gc, 0, g_height, g_width, g_height + 100);
543 
544  XDrawString(s_display, (Drawable) g_window_id, gc, 5, g_height - 100 + 15, string1,
545  strlen(string1));
546 
547  XImage* image = XGetImage(s_display, (Drawable) g_window_id, 0, 0, w, h, AllPlanes, ZPixmap);
548 
549  // unsigned char *array = new unsigned char[width * height * 3];
550  unsigned long red_mask = image->red_mask;
551  unsigned long green_mask = image->green_mask;
552  unsigned long blue_mask = image->blue_mask;
553 
554  int x, y = 0;
555 
556  for (x = 0; x < w; x++)
557  {
558  for (y = 0; y < h; y++)
559  {
560  unsigned long pixel = XGetPixel(image, x, y);
561 
562  unsigned char blue = pixel & blue_mask;
563  unsigned char green = (pixel & green_mask) >> 8;
564  unsigned char red = (pixel & red_mask) >> 16;
565 
566  img[(x + w * y) * 3 + 0] = blue;
567  img[(x + w * y) * 3 + 1] = green;
568  img[(x + w * y) * 3 + 2] = red;
569  }
570  }
571 
572  XDestroyImage(image);
573 
574  /*FILE *avconv = NULL;
575 
576  avconv = popen("ffmpeg -y -f rawvideo -vcodec rawvideo -s 800x600 -pix_fmt rgb444be -r 25 -i -
577  -vf vflip -an -c:v libx264 -preset slow test.mp4", "w");
578  if (avconv)
579  fwrite(img ,800*600*3 , 1, avconv);*/
580 
581  return img;
582 }
583 
584 void grab_snapshot(char* snap_filename)
585 {
586  FILE* f;
587  unsigned char* img = NULL;
588  int w, h;
589  w = g_width;
590  h = g_height;
591 
592  if (img)
593  free(img);
594  img = (unsigned char*) malloc(3 * w * h);
595  if (!img)
596  LOGE("grab_snapshot(): out of memory");
597  memset(img, 0, sizeof(*img));
598 
599  XImage* image = XGetImage(s_display, (Drawable) g_window_id, 0, 0, w, h, AllPlanes, ZPixmap);
600 
601  // unsigned char *array = new unsigned char[width * height * 3];
602  unsigned long red_mask = image->red_mask;
603  unsigned long green_mask = image->green_mask;
604  unsigned long blue_mask = image->blue_mask;
605 
606  int x, y = 0;
607 
608  for (x = 0; x < w; x++)
609  {
610  for (y = 0; y < h; y++)
611  {
612  unsigned long pixel = XGetPixel(image, x, y);
613 
614  unsigned char blue = pixel & blue_mask;
615  unsigned char green = (pixel & green_mask) >> 8;
616  unsigned char red = (pixel & red_mask) >> 16;
617 
618  img[(x + w * y) * 3 + 0] = blue;
619  img[(x + w * y) * 3 + 1] = green;
620  img[(x + w * y) * 3 + 2] = red;
621  }
622  }
623 
624  int filesize = 54 + 3 * g_width * g_height;
625  unsigned char bmpfileheader[14] = {'B', 'M', 0, 0, 0, 0, 0, 0, 0, 0, 54, 0, 0, 0};
626  unsigned char bmpinfoheader[40] = {40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 24, 0};
627  unsigned char bmppad[3] = {0, 0, 0};
628 
629  bmpfileheader[2] = (unsigned char) (filesize);
630  bmpfileheader[3] = (unsigned char) (filesize >> 8);
631  bmpfileheader[4] = (unsigned char) (filesize >> 16);
632  bmpfileheader[5] = (unsigned char) (filesize >> 24);
633 
634  bmpinfoheader[4] = (unsigned char) (w);
635  bmpinfoheader[5] = (unsigned char) (w >> 8);
636  bmpinfoheader[6] = (unsigned char) (w >> 16);
637  bmpinfoheader[7] = (unsigned char) (w >> 24);
638  bmpinfoheader[8] = (unsigned char) (h);
639  bmpinfoheader[9] = (unsigned char) (h >> 8);
640  bmpinfoheader[10] = (unsigned char) (h >> 16);
641  bmpinfoheader[11] = (unsigned char) (h >> 24);
642 
643  f = fopen(snap_filename, "wb");
644  fwrite(bmpfileheader, 1, 14, f);
645  fwrite(bmpinfoheader, 1, 40, f);
646 
647  int ii = 0;
648 
649  for (ii = 0; ii < g_height; ii++)
650  {
651  fwrite(img + (g_width * (g_height - ii - 1) * 3), 3, g_width, f);
652  fwrite(bmppad, 1, (4 - (g_width * 3) % 4) % 4, f);
653  }
654  fclose(f);
655  XDestroyImage(image);
656  free(img);
657 }
658 
659 int precv(void* arg)
660 {
661  s_read_args* args = (struct read_args*) arg;
662 
663  while (1)
664  {
665  pthread_mutex_lock(&args->mtx);
666  fd_set forread;
667 
668  FD_ZERO(&forread);
669  FD_SET(args->sock, &forread);
670  if (select(args->sock + 1, &forread, 0, 0, 0) == -1)
671  {
672  LOGW(" error select()");
673  return 0;
674  }
675  if (FD_ISSET(args->sock, &forread))
676  {
677  args->len = 0;
678  args->len =
679  recv(args->sock, args->buffer, READ_BUFFER_SIZE * sizeof(*args->buffer), MSG_PEEK);
680  if (args->len)
681  {
682  LOGW("select()1 %d", args->len);
683  args->len = recv(args->sock, args->buffer, args->len, MSG_WAITFORONE);
684  args->flagSnapRec = 1;
685  LOGW("select()2 %d %s", args->len, args->buffer);
686  pthread_cond_signal(&args->cond);
687  }
688  pthread_mutex_unlock(&args->mtx);
689  }
690  struct timespec duration = {0, 250000};
691  nanosleep(&duration, NULL);
692  }
693  return 0;
694 }
695 
696 void* pgrab(void* arg)
697 {
698  struct timespec tv;
699  // tv.tv_sec = 1;
700  tv.tv_nsec = 100 * 1000000; // 100ms
701 
702  s_thread_args grab_args;
703  pthread_t pgrab_Thread;
704 
705  RecordingPayload* recData;
706 
707  struct stat st = {0};
708  char base_path[BUF_SIZE] = "./log/";
709  g_strlcpy(base_path, s_path_results, sizeof(base_path));
710  LOGI("grabber base path: %s", base_path);
711 
712  if (stat(base_path, &st) == -1)
713  {
714  mkdir(base_path, 0700);
715  }
716 
717  s_read_args* args = (struct read_args*) arg;
718 
719  pthread_mutex_lock(&args->mtx);
720  while (1)
721  {
722  pthread_cond_timedwait(&args->cond, &args->mtx, &tv);
723  if (args->len && args->flagSnapRec)
724  {
725  char str_path[BUF_SIZE];
726  args->flagSnapRec = 0;
727  LOGW("%d %s flagRecording=%d", args->len, args->buffer, args->flagRecording);
728  recData = recording_payload__unpack(NULL, args->len, args->buffer);
729 
730  LOGM(" recData->recFilename=%s recData.startStop=%d ", recData->recfilename,
731  recData->startstop);
732  if (!strncmp("video", recData->recfilename, 5))
733  {
734  snprintf(grab_args.record_filename, sizeof(str_path), "%s%s", base_path,
735  recData->recfilename);
736 
737  if (recData->startstop && !args->flagRecording)
738  {
739  pthread_mutex_init(&grab_args.mtx, NULL);
740  pthread_mutex_lock(&grab_args.mtx);
741  pthread_create(&pgrab_Thread, NULL, (void*) &ffmpeg_grabber, &grab_args);
742  args->flagRecording = 1;
743  }
744  else if (!recData->startstop && args->flagRecording)
745  {
746  pthread_mutex_unlock(&grab_args.mtx);
747  pthread_join(pgrab_Thread, NULL);
748  args->flagRecording = 0;
749  }
750  }
751  else if (!strncmp("snap", recData->recfilename, 4) && recData->startstop == 2)
752  {
753  snprintf(str_path, sizeof(str_path), "%s%s", base_path, recData->recfilename);
754  grab_snapshot(str_path);
755  } // end video/snap
756  }
757  }
758  pthread_mutex_unlock(&args->mtx);
759 }
760 
761 void* grab_handler_sock(void* args)
762 {
763  sensor_params* data;
764  data = args;
765 
766  s_read_args* r_args = (struct read_args*) malloc(sizeof(s_read_args));
767 
768  socket_t player_fd;
769  int flag_connect = 0;
770 
771  uint8_t* read_buffer = (uint8_t*) malloc(sizeof(uint8_t) * READ_BUFFER_SIZE);
772  if (!r_args || !read_buffer)
773  LOGE("grab_handler_sock(): out of memory");
774 
775  flag_connect = 0;
776  data->flagRecording = 0;
777  r_args->flagRecording = data->flagRecording;
778  while (!flag_connect)
779  {
780  player_fd = open_socket(data->gvmip, PORT_GRAB);
781  if (player_fd == SOCKET_ERROR)
782  {
783  close(player_fd);
784  sleep(1);
785  }
786  else
787  {
788  LOGD("Connected to aicTest (TCP %d)", PORT_GRAB);
789  flag_connect = 1;
790  }
791  }
792  r_args->buffer = read_buffer;
793  r_args->sock = player_fd;
794  r_args->len = 0;
795 
796  pthread_t pread_Thread1, pread_Thread2;
797  pthread_mutex_init(&r_args->mtx, NULL);
798  pthread_create(&pread_Thread1, NULL, (void*) &precv, r_args);
799  pthread_create(&pread_Thread2, NULL, (void*) &pgrab, r_args);
800 
801  return NULL;
802 }
803 
804 void* grab_handler_amqp(void* args)
805 {
806  amqp_envelope_t envelope; // envelope.message.body = amqp_bytes_malloc ( ) ;
807  sensor_params* data;
808 
809  data = args;
810  RecordingPayload* recData;
811 
812  s_thread_args grab_args;
813 
814  struct stat st = {0};
815  char base_path[BUF_SIZE] = "./log/";
816  pthread_t pgrab_Thread;
817 
818  if (stat(base_path, &st) == -1)
819  {
820  mkdir(base_path, 0700);
821  }
822 
823  amqp_connection_state_t conn;
824  amqp_listen_retry(data->amqp_host, 5672, data->queue, &conn, 5);
825  while (1)
826  {
827  int err_amqlisten = amqp_consume(&conn, &envelope);
828  if (err_amqlisten == 0)
829  {
830  if (envelope.message.properties._flags & AMQP_BASIC_CONTENT_TYPE_FLAG)
831  {
832  recData = recording_payload__unpack(NULL, envelope.message.body.len,
833  envelope.message.body.bytes);
834  LOGM(" recData->mpegFilename=%s recData.startStop=%d ", recData->recfilename,
835  recData->startstop);
836  if (!strncmp("video", recData->recfilename, 5))
837  {
838  snprintf(grab_args.record_filename, sizeof(grab_args.record_filename), "%s%s",
839  base_path, recData->recfilename);
840 
841  if (recData->startstop && !data->flagRecording)
842  {
843  pthread_mutex_init(&grab_args.mtx, NULL);
844  pthread_mutex_lock(&grab_args.mtx);
845  pthread_create(&pgrab_Thread, NULL, (void*) &ffmpeg_grabber, &grab_args);
846  data->flagRecording = 1;
847  }
848  else if (!recData->startstop && data->flagRecording)
849  {
850  pthread_mutex_unlock(&grab_args.mtx);
851  pthread_join(pgrab_Thread, NULL);
852  data->flagRecording = 0;
853  }
854  }
855  else if (!strncmp("snap", recData->recfilename, 4) && recData->startstop == 2)
856  {
857  char str_path[BUF_SIZE];
858  snprintf(str_path, sizeof(str_path), "%s%s", base_path, recData->recfilename);
859  grab_snapshot(str_path);
860  } // end video/snap
861  } // end ifenvelope
862  } // end if err_amqlisten
863  sleep(1);
864  } // end while
865 }
#define LOGD(...)
Log at DEBUG level.
Definition: logger.h:21
A wrapper around a single output AVStream.
Definition: grabber.h:69
Utilities for consuming RabbitMQ messages.
#define SCALE_FLAGS
Definition: grabber.h:31
int needQuit(pthread_mutex_t *mtx)
Definition: grabber.c:82
uint8_t * buffer
Definition: grabber.h:50
Parameter for sensor threads.
Definition: sensors.h:31
#define LOGE(...)
Log at ERROR level (makes the application abort)
Definition: logger.h:31
#define BUF_SIZE
Small, fixed-size buffers.
Definition: buffer_sizes.h:9
void * g_window_id
Global variable for the X window id.
Definition: main.c:103
int g_height
Global variable for the window height.
Definition: main.c:87
void * grab_handler_amqp(void *args)
Definition: grabber.c:804
Struct holding the state of a recording (filename/lock)
Definition: grabber.h:60
const char * gvmip
VM IP.
Definition: sensors.h:42
pthread_mutex_t mtx
Definition: grabber.h:48
AVFrame * frame
Definition: grabber.h:77
int len
Definition: grabber.h:52
int amqp_listen_retry(const char *hostname, int port, const char *bindingkey, amqp_connection_state_t *conn, const unsigned int tries)
Setup a consumer for a specific queue.
Definition: amqp_listen.c:17
int socket_t
Alias to differenciate between regular ints and socket fds.
Definition: socket.h:13
socket_t sock
Definition: grabber.h:51
void grabber_set_display(Display *display)
Set the static X display pointer.
Definition: grabber.c:68
Shared structure between recv thread and grabber thread.
Definition: grabber.h:46
int ffmpeg_grabber(void *arg)
Function passed to a thread in order to detect format encoding - encode frame- and write in a file...
Definition: grabber.c:420
unsigned char * xgrabber()
Definition: grabber.c:510
#define RGB2V(R, G, B)
Definition: grabber.h:39
socket_t open_socket(const char *ip, short port)
Connect to a host:port couple.
Definition: socket.c:29
void * pgrab(void *arg)
Definition: grabber.c:696
pthread_cond_t cond
Definition: grabber.h:49
int g_width
Global variable for the window width.
Definition: main.c:93
#define RGB2U(R, G, B)
Definition: grabber.h:38
char queue[BUF_SIZE]
Queue name.
Definition: sensors.h:40
#define READ_BUFFER_SIZE
Definition: grabber.c:41
#define RGB2Y(R, G, B)
Definition: grabber.h:37
int flagRecording
Definition: grabber.h:54
Defines ports and structures for sensor threads.
#define STREAM_PIX_FMT
Pixel format of the stream (yuv420p)
Definition: grabber.h:29
struct SwrContext * swr_ctx
Definition: grabber.h:83
int amqp_consume(amqp_connection_state_t *conn, amqp_envelope_t *envelope)
Consume one message from a connection object.
Definition: amqp_listen.c:90
#define STREAM_FRAME_RATE
FPS of the stream.
Definition: grabber.h:27
void * grab_handler_sock(void *args)
Definition: grabber.c:761
Logging macros.
void grab_snapshot(char *snap_filename)
Definition: grabber.c:584
int precv(void *arg)
Definition: grabber.c:659
char record_filename[BUF_SIZE]
Definition: grabber.h:63
#define LOGM(...)
Log at MESSAGE level.
Definition: logger.h:25
#define LOGW(...)
Log at WARNING level.
Definition: logger.h:27
grabber records videos or snapshots from ampq messages
AVFrame * tmp_frame
Definition: grabber.h:78
#define SOCKET_ERROR
Alias for the recv() return value in case of error.
Definition: socket.h:10
Define common buffer sizes.
Define socket utilities to simplify networking.
int64_t next_pts
Definition: grabber.h:74
pthread_mutex_t mtx
Definition: grabber.h:62
struct SwsContext * sws_ctx
Definition: grabber.h:82
void grabber_set_path_results(char *results)
Set the static path to the screenshots/movies dir.
Definition: grabber.c:73
#define LOGI(...)
Log at INFO level.
Definition: logger.h:23
const char * amqp_host
AMQP host.
Definition: sensors.h:44
int8_t flagRecording
Grabber-specific ??
Definition: sensors.h:48
#define PORT_GRAB
Port open on the VM.
Definition: grabber.h:22
int flagSnapRec
Definition: grabber.h:53
AVStream * st
Definition: grabber.h:71