src/N900/Daemon.cpp

#include <errno.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/fcntl.h>
#include <sys/ioctl.h>
#include <poll.h>
#include <unistd.h>   // close(), usleep()
#include <string.h>   // strerror()

#include "FCam/Time.h"
#include "FCam/Frame.h"
#include "FCam/Action.h"

#include "../Debug.h"
#include "Daemon.h"
#include "linux/omap34xxcam-fcam.h"

namespace FCam { namespace N900 {

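    // pthread_create() needs plain functions taking a void* argument, so these
    // three trampolines unpack the Daemon pointer, run the corresponding member
    // function, and mark that thread as no longer running when it returns.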
    void *daemon_setter_thread_(void *arg) {
        Daemon *d = (Daemon *)arg;
        d->runSetter();
        d->setterRunning = false;
        close(d->daemon_fd);
        pthread_exit(NULL);
    }

    void *daemon_handler_thread_(void *arg) {
        Daemon *d = (Daemon *)arg;
        d->runHandler();
        d->handlerRunning = false;
        pthread_exit(NULL);
    }

    void *daemon_action_thread_(void *arg) {
        Daemon *d = (Daemon *)arg;
        d->runAction();
        d->actionRunning = false;
        pthread_exit(NULL);
    }

    Daemon::Daemon(Sensor *sensor) :
        sensor(sensor),
        stop(false),
        frameLimit(128),
        dropPolicy(Sensor::DropNewest),
        setterRunning(false),
        handlerRunning(false),
        actionRunning(false),
        threadsLaunched(false) {

        // tie ourselves to the correct sensor
        v4l2Sensor = V4L2Sensor::instance("/dev/video0");

        // make the mutexes for the producer-consumer queues
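        // Note: pthread_* calls return 0 on success, so the || chain below
        // only records *whether* one of them failed; the errno reported in
        // that case is -1 rather than the specific error code.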
        if ((errno =
             -(pthread_mutex_init(&actionQueueMutex, NULL) ||
               pthread_mutex_init(&cameraMutex, NULL)))) {
            error(Event::InternalError, sensor, "Error creating mutexes: %d", errno);
        }

        // make the semaphore
        sem_init(&actionQueueSemaphore, 0, 0);

        pipelineFlush = true;
    }

    void Daemon::launchThreads() {
        if (threadsLaunched) return;
        threadsLaunched = true;

        // launch the threads
        pthread_attr_t attr;
        struct sched_param param;

        // Open the device as a daemon
        daemon_fd = open("/dev/video0", O_RDWR, 0);

        if (daemon_fd < 0) {
            error(Event::InternalError, sensor, "Error opening /dev/video0: %d", errno);
            return;
        }

        // Try to register myself as the fcam camera client
        if (ioctl(daemon_fd, VIDIOC_FCAM_INSTALL, NULL)) {
            if (errno == EBUSY) {
                error(Event::DriverError, sensor,
                      "An FCam program is already running");
            } else {
                error(Event::DriverError, sensor,
                      "Error %d calling VIDIOC_FCAM_INSTALL: Are the FCam drivers installed?", errno);
            }
            return;
        }
        // I should now have CAP_SYS_NICE

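        // Thread priorities (all SCHED_FIFO): the action thread gets the
        // maximum priority since its whole job is hitting precise trigger
        // times; the setter sits one step above the handler so that, during a
        // pipeline flush, it can grab the camera mutex as soon as the handler
        // releases it (see the flush loop in runHandler); the handler runs at
        // the minimum real-time priority.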
        // make the setter thread
        param.sched_priority = sched_get_priority_min(SCHED_FIFO)+1;

        pthread_attr_init(&attr);

        if ((errno =
             -(pthread_attr_setschedparam(&attr, &param) ||
               pthread_attr_setschedpolicy(&attr, SCHED_FIFO) ||
               pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED) ||
               pthread_create(&setterThread, &attr, daemon_setter_thread_, this)))) {
            error(Event::InternalError, sensor, "Error creating daemon setter thread: %d", errno);
            return;
        } else {
            setterRunning = true;
        }

        // make the handler thread
        param.sched_priority = sched_get_priority_min(SCHED_FIFO);

        if ((errno =
             -(pthread_attr_setschedparam(&attr, &param) ||
               pthread_attr_setschedpolicy(&attr, SCHED_FIFO) ||
               pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED) ||
               pthread_create(&handlerThread, &attr, daemon_handler_thread_, this)))) {
            error(Event::InternalError, sensor, "Error creating daemon handler thread: %d", errno);
            return;
        } else {
            handlerRunning = true;
        }

        // make the actions thread
        param.sched_priority = sched_get_priority_max(SCHED_FIFO);

        if ((errno =
             -(pthread_attr_setschedparam(&attr, &param) ||
               pthread_attr_setschedpolicy(&attr, SCHED_FIFO) ||
               pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED) ||
               pthread_create(&actionThread, &attr, daemon_action_thread_, this)))) {
            error(Event::InternalError, sensor, "Error creating daemon action thread: %d", errno);
            return;
        } else {
            actionRunning = true;
        }

        pthread_attr_destroy(&attr);
    }

    Daemon::~Daemon() {
        stop = true;

        // post a wakeup call to the action thread
        sem_post(&actionQueueSemaphore);

        if (setterRunning)
            pthread_join(setterThread, NULL);

        if (handlerRunning)
            pthread_join(handlerThread, NULL);

        if (actionRunning)
            pthread_join(actionThread, NULL);

        pthread_mutex_destroy(&actionQueueMutex);
        pthread_mutex_destroy(&cameraMutex);

        v4l2Sensor->stopStreaming();

        v4l2Sensor->close();
    }

    void Daemon::setDropPolicy(Sensor::DropPolicy p, int f) {
        dropPolicy = p;
        frameLimit = f;
        enforceDropPolicy();
    }

    void Daemon::enforceDropPolicy() {
        if (frameQueue.size() > frameLimit) {
            warning(Event::FrameLimitHit, sensor,
                    "WARNING: frame limit hit (%d), silently dropping %d frames.\n"
                    "You're not draining the frame queue quickly enough. Use longer \n"
                    "frame times or drain the frame queue until empty every time you \n"
                    "call getFrame()\n", frameLimit, frameQueue.size() - frameLimit);
            if (dropPolicy == Sensor::DropOldest) {
                while (frameQueue.size() >= frameLimit) {
                    delete frameQueue.pull();
                }
            } else if (dropPolicy == Sensor::DropNewest) {
                while (frameQueue.size() >= frameLimit) {
                    delete frameQueue.pullBack();
                }
            } else {
                error(Event::InternalError, sensor,
                      "Unknown drop policy! Not dropping frames.\n");
            }
        }
    }

    void Daemon::runSetter() {
        dprintf(2, "Running setter...\n"); fflush(stdout);
        tickSetter(Time::now());
        while (!stop) {
            struct timeval t;
            if (ioctl(daemon_fd, VIDIOC_FCAM_WAIT_FOR_HS_VS, &t)) {
                if (stop) break;
                error(Event::DriverError, sensor,
                      "error in VIDIOC_FCAM_WAIT_FOR_HS_VS: %s", strerror(errno));
                usleep(100000);
                continue;
            }
            tickSetter(Time(t));
        }
    }

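    // tickSetter runs once per HS_VS event reported by the driver. Because the
    // sensor pipelines its settings, each request is programmed over two ticks:
    // on one tick the setter picks the request and sets its frame time and
    // exposure (switching modes first if necessary), and on the next tick it
    // sets gain and white balance, predicts the frame's completion time,
    // schedules the request's actions, and hands the request to the handler
    // via the in-flight queue.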
    void Daemon::tickSetter(Time hs_vs) {
        static _Frame *req = NULL;

        dprintf(3, "Current hs_vs was at %d %d\n", hs_vs.s(), hs_vs.us());

        // how long will the previous frame take to readout
        static int lastReadout = 33000;

        static bool ignoreNextHSVS = false;

        dprintf(4, "Setter: got HS_VS\n");

        if (ignoreNextHSVS) {
            dprintf(4, "Setter: ignoring it\n");
            ignoreNextHSVS = false;
            return;
        }

        // Is there a request for which I have set resolution and exposure, but not gain and WB?
        if (req) {
            dprintf(4, "Setter: setting gain and WB\n");
            // set the gain and predicted done time on the pending request
            // and then push it onto the handler's input queue and the v4l2 buffer queue

            if (req->shot().gain != current._shot.gain) {
                v4l2Sensor->setGain(req->shot().gain);
                current._shot.gain = req->shot().gain;
            }
            current.gain = req->gain = v4l2Sensor->getGain();

            if (req->shot().whiteBalance != current._shot.whiteBalance) {
                int wb = req->shot().whiteBalance;

                // Very lenient sanity checks - restricting the range is up to the auto-wb algorithm
                if (wb < 0) wb = 0; // a black-body radiator at absolute zero.
                if (wb > 25000) wb = 25000; // A type 'O' star.

                float matrix[12];
                sensor->rawToRGBColorMatrix(wb, matrix);
                v4l2Sensor->setWhiteBalance(matrix);

                current._shot.whiteBalance = req->shot().whiteBalance;
                current.whiteBalance = wb;
            }
            req->whiteBalance = current.whiteBalance;

            // Predict when this frame will be done. It should be HS_VS +
            // the readout time for the previous frame + the frame time
            // for this request + however long the ISP takes to process
            // this frame.
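            // For example, with the default 33 ms readout for the previous
            // frame, a 33 ms frame time for this request, and ~10 ms of ISP
            // time for a small UYVY frame, the frame should be fully processed
            // roughly 76 ms after this HS_VS.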
            req->processingDoneTime = hs_vs;

            // first there's the readout time for the previous frame
            req->processingDoneTime += lastReadout;

            // then there's the time to expose and read out this frame
            req->processingDoneTime += req->frameTime;

            // then there's some significant time inside the ISP if it's YUV and large
            // (this may be N900 specific)
            int ispTime = 0;
            if (req->image.type() == UYVY) {
                if (req->image.height() > 1024 && req->image.width() > 1024) {
                    ispTime = 65000;
                } else {
                    ispTime = 10000;
                }
            }
            req->processingDoneTime += ispTime;

            // Also compute when the exposure starts and finishes
            req->exposureStartTime = hs_vs + req->frameTime - req->exposure;

            // Now update the readout time for this frame. This formula
            // is specific to the Toshiba ET8EK8 sensor on the N900.
            lastReadout = (current.image.height() > 1008) ? 76000 : 33000;

            req->exposureEndTime = req->exposureStartTime + req->exposure + lastReadout;

            // now queue up this request's actions
            pthread_mutex_lock(&actionQueueMutex);
            for (std::set<FCam::Action*>::const_iterator i = req->shot().actions().begin();
                 i != req->shot().actions().end();
                 i++) {
                Action a;
                a.time = req->exposureStartTime + (*i)->time - (*i)->latency;
                a.action = (*i)->copy();
                actionQueue.push(a);
            }
            pthread_mutex_unlock(&actionQueueMutex);
            for (size_t i = 0; i < req->shot().actions().size(); i++) {
                sem_post(&actionQueueSemaphore);
            }

            // The setter is done with this frame. Push it into the
            // in-flight queue for the handler to deal with.
            inFlightQueue.push(req);
            req = NULL;
        } else {
            // a bubble!
            lastReadout = (current.image.height() > 1008) ? 76000 : 33000;
        }

        // grab a new request and set an appropriate exposure and frame time
        // for it. Make sure there's a request ready for us.
        if (!requestQueue.size()) {
            sensor->generateRequest();
        }

        // Peek ahead into the request queue to see what request we're
        // going to be handling next
        if (requestQueue.size()) {
            dprintf(4, "Setter: grabbing next request\n");
            // There's a real request for us to handle
            req = requestQueue.front();
        } else {
            dprintf(4, "Setter: inserting a bubble\n");
            // There are no pending requests, push a bubble into the
            // pipeline. The default parameters for a frame work nicely as
            // a bubble (as short as possible, no stats generated, output
            // unwanted).
            req = new _Frame;
            req->_shot.wanted = false;

            // bubbles should just run at whatever resolution is going. If
            // fast mode switches were possible, it might be nice to run
            // at the minimum resolution to go even faster, but they're
            // not.
            req->_shot.image = Image(current._shot.image.size(), current._shot.image.type(), Image::Discard);

            // generate histograms and sharpness maps if they're going,
            // but drop the data.
            req->_shot.histogram  = current._shot.histogram;
            req->_shot.sharpness  = current._shot.sharpness;

            // push the bubble into the pipe
            requestQueue.pushFront(req);
        }

        // Check if the next request requires a mode switch
        if (req->shot().image.size() != current._shot.image.size() ||
            req->shot().image.type() != current._shot.image.type() ||
            req->shot().histogram  != current._shot.histogram  ||
            req->shot().sharpness  != current._shot.sharpness) {

            // flush the pipeline
            dprintf(3, "Setter: Mode switch required - flushing pipe\n");
            pipelineFlush = true;

            pthread_mutex_lock(&cameraMutex);
            dprintf(3, "Setter: Handler done flushing pipe, passing control back to setter\n");

            // do the mode switch

            if (current.image.width() > 0) {
                dprintf(3, "Setter: Shutting down camera\n");
                v4l2Sensor->stopStreaming();
                v4l2Sensor->close();
            }
            dprintf(3, "Setter: Starting up camera in new mode\n");
            v4l2Sensor->open();

            // set all the params for the new frame
            V4L2Sensor::Mode m;
            m.width  = req->shot().image.width();
            m.height = req->shot().image.height();
            m.type   = req->shot().image.type();
            v4l2Sensor->startStreaming(m, req->shot().histogram, req->shot().sharpness);
            v4l2Sensor->setFrameTime(0);

            dprintf(3, "Setter: Setter done bringing up camera, passing control back "
                    "to handler. Expect two mystery frames.\n");
            pipelineFlush = false;
            pthread_mutex_unlock(&cameraMutex);

            m = v4l2Sensor->getMode();
            // Set destination image to new mode settings
            req->image = Image(m.width, m.height, m.type, Image::Discard);

            current._shot.image = Image(req->shot().image.size(), req->shot().image.type(), Image::Discard);
            current._shot.histogram  = req->shot().histogram;
            current._shot.sharpness  = req->shot().sharpness;

            // make sure we set everything else for the next frame,
            // because restarting streaming will have nuked it
            current._shot.frameTime = -1;
            current._shot.exposure = -1;
            current._shot.gain = -1.0;
            current._shot.whiteBalance = -1;
            current.image = Image(m.width, m.height, m.type, Image::Discard);

            req = NULL;

            // Wait for the second HS_VS before proceeding
            ignoreNextHSVS = true;

            return;
        } else {
            // no mode switch required
        }

        // pop the request
        requestQueue.pop();

        Time next = hs_vs + current.frameTime;
        dprintf(3, "The current %d x %d frame has a frametime of %d\n",
                current.image.width(), current.image.height(), current.frameTime);
        dprintf(3, "Predicting that the next HS_VS will be at %d %d\n",
                next.s(), next.us());

        int exposure = req->shot().exposure;
        int frameTime = req->shot().frameTime;

        if (frameTime < exposure + 400) {
            frameTime = exposure + 400;
        }

        dprintf(4, "Setter: setting frametime and exposure\n");
        // Set the frame time and exposure
        v4l2Sensor->setFrameTime(frameTime);
        v4l2Sensor->setExposure(exposure);

        // Tag the request with the actual params. Also store them in
        // current to avoid unnecessary I2C.
        current._shot.frameTime = frameTime;
        current._shot.exposure  = exposure;
        current.exposure  = req->exposure  = v4l2Sensor->getExposure();
        current.frameTime = req->frameTime = v4l2Sensor->getFrameTime();
        req->image = current.image;

        // Exposure and frame time are set. Return and wait for the next
        // HS_VS before setting gain for this request (and pulling the next request).

        dprintf(4, "Setter: Done with this HS_VS, waiting for the next one\n");
    }

    void Daemon::runHandler() {
        _Frame *req = NULL;
        V4L2Sensor::V4L2Frame *f = NULL;

        pthread_mutex_lock(&cameraMutex);

        while (!stop) {

            // the setter may be waiting for me to finish processing
            // outstanding requests
            if (!req && pipelineFlush && inFlightQueue.size() == 0) {
                dprintf(3, "Handler: Handler done flushing pipe, passing control back to setter\n");
                while (pipelineFlush) {
                    pthread_mutex_unlock(&cameraMutex);
                    // let the setter grab the mutex. It has higher priority,
                    // so it should happen instantly. We put this in a while
                    // loop just to be sure.
                    usleep(10000);
                    pthread_mutex_lock(&cameraMutex);
                }
                dprintf(3, "Handler: Setter done bringing up camera, passing control back to handler\n");
            }

            if (pipelineFlush) {
                dprintf(3, "Handler: Setter would like me to flush the pipeline, but I have requests in flight\n");
            }

            // wait for a frame
            if (!f)
                f = v4l2Sensor->acquireFrame(true);

            if (!f) {
                error(Event::InternalError, "Handler got a NULL frame\n");
                usleep(300000);
                continue;
            }

            // grab a request to match to it
            if (!req) {
                if (inFlightQueue.size()) {
                    dprintf(4, "Handler: popping a frame request\n");
                    req = inFlightQueue.pull();
                } else {
                    // there's no request for this frame - probably coming up
                    // from a mode switch or starting up
                    dprintf(3, "Handler: Got a frame without an outstanding request,"
                            " dropping it.\n");
                    v4l2Sensor->releaseFrame(f);
                    f = NULL;
                    continue;
                }
            }

            // at this point we have a frame and a request, now look at
            // the time delta between them to see if they're a match
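            // Three cases below: a frame arriving more than 25 ms later than
            // predicted means the request's own frame is assumed lost (its
            // image is discarded, though wanted requests are still delivered
            // with their stats); within roughly -25 ms..+10 ms the pair is
            // treated as a match; a frame more than 10 ms early is an
            // unexpected one (e.g. just after a mode switch) and is dropped.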
            int dt = req->processingDoneTime - f->processingDoneTime;

            dprintf(4, "Handler dt = %d\n", dt);

            if (dt < -25000) { // more than 25 ms late
                dprintf(3, "Handler: Expected a frame at %d %d, but one didn't arrive until %d %d\n",
                        req->processingDoneTime.s(), req->processingDoneTime.us(),
                        f->processingDoneTime.s(), f->processingDoneTime.us());
                req->image = Image(req->image.size(), req->image.type(), Image::Discard);
                if (!req->shot().wanted) {
                    delete req;
                } else {
                    // the histogram and sharpness map may still have appeared
                    req->histogram = v4l2Sensor->getHistogram(req->exposureEndTime, req->shot().histogram);
                    req->sharpness = v4l2Sensor->getSharpnessMap(req->exposureEndTime, req->shot().sharpness);
                    frameQueue.push(req);
                    enforceDropPolicy();
                }
                req = NULL;
            } else if (dt < 10000) {
                // Is this frame wanted or a bubble?
                if (!req->shot().wanted) {
                    // it's a bubble - drop it
                    dprintf(4, "Handler: discarding a bubble\n");
                    delete req;
                    v4l2Sensor->releaseFrame(f);
                } else {

                    // this looks like a match - bag and tag it
                    req->processingDoneTime = f->processingDoneTime;

                    size_t bytes = req->image.width()*req->image.height()*2;
                    if (f->length < bytes) bytes = f->length;

                    if (req->shot().image.autoAllocate()) {
                        req->image = Image(req->image.size(), req->image.type(), f->data).copy();
                    } else if (req->shot().image.discard()) {
                        req->image = Image(req->image.size(), req->image.type(), Image::Discard);
                    } else {
                        if (req->image.size() != req->shot().image.size()) {
                            error(Event::ResolutionMismatch, sensor,
                                  "Requested image size (%d x %d) "
                                  "on an already allocated image does not "
                                  "match actual image size (%d x %d). Dropping image data.\n",
                                  req->shot().image.width(), req->shot().image.height(),
                                  req->image.width(), req->image.height());
                            req->image = Image(req->image.size(), req->image.type(), Image::Discard);
                            // TODO: crop instead?
                        } else { // the size matches
                            req->image = req->shot().image;
                            // figure out how long I can afford to wait
                            // For now, 10000 us should be safe
                            Time lockStart = Time::now();
                            if (req->image.lock(10000)) {
                                req->image.copyFrom(Image(req->image.size(), req->image.type(), f->data));
                                req->image.unlock();
                            } else {
                                warning(Event::ImageTargetLocked, sensor,
                                        "Daemon discarding image data (target is still locked, "
                                        "waited for %d us)\n", Time::now() - lockStart);
                                req->image = Image(req->image.size(), req->image.type(), Image::Discard);
                            }
                        }
                    }

                    v4l2Sensor->releaseFrame(f);
                    req->histogram = v4l2Sensor->getHistogram(req->exposureEndTime, req->shot().histogram);
                    req->sharpness = v4l2Sensor->getSharpnessMap(req->exposureEndTime, req->shot().sharpness);

                    frameQueue.push(req);
                    enforceDropPolicy();
                }

                req = NULL;
                f = NULL;

            } else { // more than 10ms early. Perhaps there was a mode switch.
                dprintf(3, "Handler: Received an early mystery frame (%d %d) vs (%d %d), dropping it.\n",
                        f->processingDoneTime.s(), f->processingDoneTime.us(),
                        req->processingDoneTime.s(), req->processingDoneTime.us());
                v4l2Sensor->releaseFrame(f);
                f = NULL;
            }
        }
        pthread_mutex_unlock(&cameraMutex);
    }

    void Daemon::runAction() {
        dprintf(2, "Action thread running...\n");
        while (1) {
            sem_wait(&actionQueueSemaphore);
            if (stop) break;
            // priority inversion :(
            pthread_mutex_lock(&actionQueueMutex);
            Action a = actionQueue.top();
            actionQueue.pop();
            pthread_mutex_unlock(&actionQueueMutex);

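            // Two-stage wait: sleep until roughly 0.5 ms before the scheduled
            // time, then busy-wait on Time::now() so the action fires as close
            // to a.time as possible.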
            Time t = Time::now();
            int delay = (a.time - t) - 500;
            if (delay > 0) usleep(delay);
            Time before = Time::now();
            // busy wait until go time
            while (a.time > before) before = Time::now();
            a.action->doAction();
            //Time after = Time::now();
            dprintf(3, "Action thread: Initiated action %d us after scheduled time\n", before - a.time);
            delete a.action;
        }
    }

}}
