/**
 * @example camera.c
 * Question: I need to display a live camera image via VNC. Until now I just
 * grab an image, mark the rectangle as modified and sleep for 0.1 s to give
 * the system time to transfer the data.
 * This obviously doesn't scale well across different connection speeds and
 * CPU horsepower, so I wonder whether there is a way for the server
 * application to determine if the updates have been sent. The live image
 * update rate would then always be the maximum the connection supports,
 * while avoiding excessive load.
 *
 * Thanks in advance,
 *
 * Christian Daschill
 *
 * Answer: Originally, I thought about using separate threads and a mutex to
 * determine when the framebuffer was being accessed by any client, so we
 * could find a safe time to take a picture.  The problem is that this
 * lock-steps everything on framebuffer access.  Why not be a single-threaded
 * application and take a camera snapshot in between calls to
 * rfbProcessEvents()?  That is what this example does.  It guarantees that
 * the clients have been serviced before another picture is taken.
 *
 * The downside of this approach is that the more clients you have (or the
 * slower their links), the less time is left to service the camera, which
 * translates into a reduced frame rate.  Increasing your system's Ethernet
 * transmit queues may improve overall performance, since libvncserver
 * should then not stall while transmitting to any single client.
 *
 * Another solution would be to provide a separate framebuffer for each
 * client and use mutexes to determine whether any particular client is
 * ready for a snapshot.  That way you are not updating a framebuffer for a
 * slow client while it is still being transferred.  (A disabled sketch of
 * the per-client hooks this would need appears after the defines below.)
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>   /* gettimeofday() */
#include <rfb/rfb.h>


#define WIDTH  640
#define HEIGHT 480
#define BPP      4

/* 15 frames per second (if we can) */
#define PICTURE_TIMEOUT (1.0/15.0)
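
/*
 * A minimal, disabled sketch of the per-client bookkeeping mentioned in the
 * comment above, assuming pthreads.  The newClientHook / clientGoneHook
 * callbacks and the clientData pointer are part of the libvncserver API;
 * the PerClientData structure, its private buffer and how it would be fed
 * into the encoders are assumptions and would need real integration work.
 */
#if 0
#include <pthread.h>

typedef struct {
  pthread_mutex_t lock;    /* held while this client's buffer is being sent */
  unsigned char *buffer;   /* private copy of the most recent frame         */
} PerClientData;

static void clientgone(rfbClientPtr cl)
{
  PerClientData *pcd = (PerClientData *)cl->clientData;
  pthread_mutex_destroy(&pcd->lock);
  free(pcd->buffer);
  free(pcd);
  cl->clientData = NULL;
}

static enum rfbNewClientAction newclient(rfbClientPtr cl)
{
  PerClientData *pcd = (PerClientData *)calloc(1, sizeof(PerClientData));
  pthread_mutex_init(&pcd->lock, NULL);
  pcd->buffer = (unsigned char *)malloc(WIDTH * HEIGHT * BPP);
  cl->clientData = pcd;
  cl->clientGoneHook = clientgone;
  return RFB_CLIENT_ACCEPT;
}

/* in main(): server->newClientHook = newclient; */
#endif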


/*
 * throttle camera updates: return true at most once per PICTURE_TIMEOUT
 */
int TimeToTakePicture(void) {
    static struct timeval now={0,0}, then={0,0};
    double elapsed, dnow, dthen;

    gettimeofday(&now,NULL);

    dnow  = now.tv_sec  + (now.tv_usec /1000000.0);
    dthen = then.tv_sec + (then.tv_usec/1000000.0);
    elapsed = dnow - dthen;

    if (elapsed > PICTURE_TIMEOUT)
      then = now;   /* remember when this picture was taken */
    return elapsed > PICTURE_TIMEOUT;
}



/*
 * simulate grabbing a picture from some device
 */
int TakePicture(unsigned char *buffer)
{
  static int last_line=0, fps=0, fcount=0;
  int line=0;
  int i,j;
  struct timeval now;

  /*
   * simulate grabbing data from a device by updating the entire framebuffer
   */
  
  for(j=0;j<HEIGHT;++j) {
    for(i=0;i<WIDTH;++i) {
      buffer[(j*WIDTH+i)*BPP+0]=(i+j)*128/(WIDTH+HEIGHT); /* red */
      buffer[(j*WIDTH+i)*BPP+1]=i*128/WIDTH; /* green */
      buffer[(j*WIDTH+i)*BPP+2]=j*256/HEIGHT; /* blue */
    }
    /* paint the first pixel of each row white: a thin line down the left edge */
    buffer[j*WIDTH*BPP+0]=0xff;
    buffer[j*WIDTH*BPP+1]=0xff;
    buffer[j*WIDTH*BPP+2]=0xff;
  }

  /*
   * simulate the passage of time
   *
   * draw a simple black line that moves down the screen. The faster the
   * client, the more updates it will get, the smoother it will look!
   */
  gettimeofday(&now,NULL);
  line = now.tv_usec / (1000000/HEIGHT);
  if (line >= HEIGHT) line = HEIGHT-1; /* tv_usec can reach 999999, keep the line in bounds */
  memset(&buffer[(WIDTH * BPP) * line], 0, (WIDTH * BPP));

  /* frames per second (informational only) */
  fcount++;
  if (last_line > line) {
    fps = fcount;
    fcount = 0;
  }
  last_line = line;
  fprintf(stderr,"%03d/%03d Picture (%03d fps)\r", line, HEIGHT, fps);

  /* success!   We have a new picture! */
  return (1==1);
}
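
/*
 * A minimal, disabled sketch of what TakePicture() might look like with a
 * real source, assuming raw WIDTH x HEIGHT frames of BPP bytes per pixel
 * arrive on a file descriptor (for example from a capture helper piped into
 * this program).  The descriptor "camera_fd" and the raw-frame convention
 * are assumptions, not part of the original example.
 */
#if 0
#include <unistd.h>

static int TakePictureFromFd(int camera_fd, unsigned char *buffer)
{
  size_t want = WIDTH * HEIGHT * BPP;
  size_t got = 0;

  while (got < want) {
    ssize_t n = read(camera_fd, buffer + got, want - got);
    if (n <= 0)
      return 0;          /* no complete frame: caller skips the mark-as-modified */
    got += (size_t)n;
  }
  return 1;              /* a full frame was copied into the framebuffer */
}
#endif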




/*
 * Single-threaded application that interleaves client servicing with taking
 * pictures from the camera.  This way we do not update the framebuffer
 * while an encoder is still reading from it (which would cause banding and
 * image artifacts).
 */
int main(int argc,char** argv)
{                                       
  long usec;
  
  rfbScreenInfoPtr server=rfbGetScreen(&argc,argv,WIDTH,HEIGHT,8,3,BPP);
  if(!server)
    return 1;
  server->desktopName = "Live Video Feed Example";
  server->frameBuffer=(char*)malloc(WIDTH*HEIGHT*BPP);
  if(!server->frameBuffer)
    return 1;
  server->alwaysShared=(1==1);

  /* Initialize the server */
  rfbInitServer(server);           

  /* Loop, processing clients and taking pictures */
  while (rfbIsActive(server)) {
    if (TimeToTakePicture())
      if (TakePicture((unsigned char *)server->frameBuffer))
        rfbMarkRectAsModified(server,0,0,WIDTH,HEIGHT);
          
    usec = server->deferUpdateTime*1000;   /* deferUpdateTime is in ms; rfbProcessEvents() wants usec */
    rfbProcessEvents(server,usec);
  }
  return(0);
}
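
/*
 * Build note (an assumption, not part of the original example): with the
 * libvncserver development package installed, something like
 *
 *   gcc -o camera camera.c $(pkg-config --cflags --libs libvncserver)
 *
 * should produce a server you can connect to with any VNC viewer on the
 * default port 5900.
 */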