ftp.nice.ch/pub/next/connectivity/news/NewsBase.3.02.s.tar.gz#/NewsBase302.source/MMEdit/IVdrD.m

#import "IVdrD.h"
#import <appkit/NXBitmapImageRep.h>
#import <appkit/tiff.h>
#import <appkit/Panel.h>
#import <appkit/Application.h>
#import <dpsclient/dpsclient.h>
#import <objc/hashtable.h>
#import <appkit/ButtonCell.h>
#import <sys/time.h>
#import <math.h>
#import <sound/sound.h>		/* SNDReserve(), SNDUnreserve() */
#import <stdio.h>		/* fprintf() */
#import "IGraphicImage.h"
#import "ITimedImage.h"
#import "ITimedSound.h"
#import "vdr.h"
#import "drawRect.h"

#define TOLERANCE 0.010
#define IMAGE_FRAME_LAG_TIME -0.15
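
/*
 * IVdrD displays an embedded "vdr" video document.  The document stream is
 * a sequence of videoHeader-tagged chunks carrying TIFF video frames and
 * timed sound fragments; readFromStream: unpacks them into frameList and
 * fragmentList and builds a small player (video view plus a PLAY/STOP/
 * STEP/BACK button matrix).  Playback is driven by a DPS timed entry that
 * keeps frames and sound roughly in step with the wall clock.
 */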

@implementation IVdrD

static		NXAtom rtfControlWord;
static		NXAtom fileExtension;
static double	tolerance = TOLERANCE;

+ initialize
{
    rtfControlWord = NXUniqueStringNoCopy("MMvdr");
    fileExtension = NXUniqueStringNoCopy("vdr");
    return(self);
}

+ (NXAtom)rtfControlWord
{
    return(rtfControlWord);
}

+ (NXAtom)fileExtension
{
    return(fileExtension);
}

+ (NXAtom)pasteboardType
{
    return(NULL);
}

+ (const char *)icon
{
    return("mtif_icon");
}

- (NXImage *)image
{
    return(image);
}

- (BOOL)readFromStream:(NXStream *)inStream
{
    char *buffer;
    int len, maxLen;
    NXStream *tempStream;
    BOOL endOfStream;
    struct videoHeader *header;
    NXBitmapImageRep *bitmap;
    ITimedImage *frame;
    ITimedSound *fragment;
    double time, previousFrameTime;
    NXSize videoFrameSize;
    NXRect videoViewFrame;
    NXRect videoButtonsFrame;
    NXRect videoWindowFrame;

    [super readFromStream:inStream];
    frameList = [List allocFromZone:[self zone]];
    fragmentList = [List allocFromZone:[self zone]];
    NXGetMemoryBuffer(stream, &buffer, &len, &maxLen);
    interval = MAXDOUBLE;
    previousFrameTime = -MAXDOUBLE;
    header = (struct videoHeader *)buffer;
    endOfStream = NO;
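    /*
     * Walk the buffer chunk by chunk: each chunk is a videoHeader followed
     * by header->length bytes of payload.  TIFF chunks become ITimedImage
     * frames, sound chunks become ITimedSound fragments, and the end marker
     * stops the loop.  "interval" ends up as the smallest gap between
     * consecutive frame timestamps.
     */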
    while(endOfStream == NO) {
        tempStream = NXOpenMemory(NULL, 0, NX_READWRITE);
        NXWrite(tempStream, (char *)header + sizeof(struct videoHeader),
            header->length);
        NXSeek(tempStream, (long)0, NX_FROMSTART);
        switch(header->magic) {
        case VID_TIFF_MAGIC:
            bitmap = [[NXBitmapImageRep alloc] initFromStream:tempStream];
            if (bitmap != nil) {
                [bitmap getSize:&videoFrameSize];
                frame = [[ITimedImage allocFromZone:[self zone]]
                    initSize:&videoFrameSize];
                [frame setTime:header->time];
                [frame lockFocus];
                [bitmap draw];
                [frame unlockFocus];
                [bitmap free];
                time = [frame time];
                if (time - previousFrameTime < interval) {
                    interval = time - previousFrameTime;
                }
                previousFrameTime = time;
                [frameList addObject:frame];
            } else {
                NXRunAlertPanel("NewsBase", "Cannot load video frame",
                    NULL, NULL, NULL);
            }
            break;
        case VID_SND_MAGIC:
            fragment = [ITimedSound new];
            [fragment setTime:header->time];
            [fragment readStream:tempStream];
            [fragmentList addObject:fragment];
            break;
        case VID_END_MAGIC:
            endOfStream = YES;
            break;
        }
        NXCloseMemory(tempStream, NX_FREEBUFFER);
        header = (struct videoHeader *)((char *)header
            + sizeof(struct videoHeader) + header->length);
    }
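    /* Run the playback timed entry at half the smallest inter-frame gap. */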
    interval /= 2.0;
    NXSeek(stream, (long)0, NX_FROMEND);
    size = NXTell(stream);
    NXSeek(stream, (long)0, NX_FROMSTART);
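    /* Build the transport controls: a 2x2 matrix of PLAY/STOP/STEP/BACK
       buttons below the video view, both wrapped in videoWindow. */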
    videoButtonsFrame.origin.x = videoButtonsFrame.origin.y = 0.0;
    videoButtonsFrame.size.width = videoFrameSize.width;
    videoButtonsFrame.size.height = 30.0;
    videoButtons = [[Matrix alloc] initFrame:&videoButtonsFrame
        mode:NX_RADIOMODE cellClass:[ButtonCell class] numRows:2 numCols:2];
    [videoButtons setAutosizeCells:YES];
    [videoButtons setTarget:self];
    [videoButtons setAction:@selector(actionForButtons:)];
    [videoButtons setTitle:"PLAY" at:0 :0];
    [videoButtons setTitle:"STOP" at:0 :1];
    [videoButtons setTitle:"STEP" at:1 :0];
    [videoButtons setTitle:"BACK" at:1 :1];
    [videoButtons setTag:0 at:0 :0];
    [videoButtons setTag:1 at:0 :1];
    [videoButtons setTag:2 at:1 :0];
    [videoButtons setTag:3 at:1 :1];
    videoViewFrame.origin.x = 0.0;
    videoViewFrame.origin.y = videoButtonsFrame.size.height;
    videoViewFrame.size = videoFrameSize;
    videoView = [[View alloc] initFrame:&videoViewFrame];
    videoWindowFrame.origin.x = videoWindowFrame.origin.y = 0.0;
    videoWindowFrame.size.width = videoViewFrame.size.width;
    videoWindowFrame.size.height = videoButtonsFrame.size.height +
        videoViewFrame.size.height;
    videoWindow  = [[View alloc] initFrame:&videoWindowFrame];
    [videoWindow addSubview:videoView];
    [videoWindow addSubview:videoButtons];
    dpsTimedEntryId = (DPSTimedEntry)-1;
    currentFrameNo = 0;
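    /* Compose a static image of the first frame plus the button row; this
       is what -image returns for display while the live view is not up. */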
    image = [[NXImage alloc] initSize:&videoWindowFrame.size];
    [image lockFocus];
    [[frameList objectAt:currentFrameNo] composite:NX_COPY
        toPoint:&videoViewFrame.origin];
    [videoButtons drawSelf:(NXRect *)0 :0];
    [image unlockFocus];
    return(YES);
}

- (void)writeToStream:(NXStream *)outStream
{
    if (stream != NULL) {
        [super writeToStream:outStream];
    } 
}

- free
{
    if (dpsTimedEntryId != (DPSTimedEntry)-1) {
        [self stopPlay];
    }
    if ([videoWindow superview] != nil) {
        [videoWindow removeFromSuperview];
    }
    [frameList freeObjects];
    [frameList free];
    [fragmentList freeObjects];
    [fragmentList free];
    [image free];
    [videoButtons free];
    [videoView free];
    [videoWindow free];
    return([super free]);
}

- performDoubleClickAction:sender
{
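    /* A double-click toggles the live player: install videoWindow as a
       subview of the sender's view, or stop playback and schedule its
       removal on the next timed entry. */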
    NXPoint point;
    NXRect videoWindowFrame;
    void removeVideoViewFromSuperview(DPSTimedEntry, double, char *);

    if ([videoWindow superview] == nil) {
        // notify the IGraphicImage and INewsBaseText that there is an
        // embedded view
        [sender setIsActiveEmbeddedView:YES];
        [[sender view] addEmbeddedViewController:sender];
        [[sender view] display];
        [sender getOrigin:&point];
        [videoWindow getFrame:&videoWindowFrame];
        [[sender view] addSubview:videoWindow];
        [videoWindow moveTo:point.x :point.y - videoWindowFrame.size.height];
        [videoWindow display];
    } else {
        if (dpsTimedEntryId != (DPSTimedEntry)-1) {
            [self stopPlay];
        }
/*
        [videoButtons lockFocus];
        [image composite:NX_COPY toPoint:&point];
        [videoButtons unlockFocus];
        [[videoWindow window] flushWindow];
*/
        DPSAddTimedEntry(0.0, (DPSTimedEntryProc)removeVideoViewFromSuperview,
            (void *)self, NX_MODALRESPTHRESHOLD);
        [sender setIsActiveEmbeddedView:NO];
        [[sender view] removeEmbeddedViewController:sender];
        [[sender view] display];
    }
    return(self);
}

- actionForButtons:sender
{
    enum {PLAY = 0, STOP = 1, PLUS = 2, MINUS = 3};

    switch([[sender selectedCell] tag]) {
    case PLAY:
        return([self play]);
    case STOP:
        return([self stop]);
    case PLUS:
        return([self plus]);
    case MINUS:
        return([self minus]);
    }
    return(self);
}

- play
{
    void startEventsForTime(DPSTimedEntry, double, char *);

    if (dpsTimedEntryId == (DPSTimedEntry)-1) {
        if (currentFrameNo >= [frameList count]) {
            currentFragmentNo = currentFrameNo = 0;
        }
        SNDReserve(SND_ACCESS_IN | SND_ACCESS_OUT | SND_ACCESS_DSP, MAXINT);
        realStartTime = -MAXDOUBLE;
        startTime = -MAXDOUBLE;
        dpsTimedEntryId = DPSAddTimedEntry(interval, 
            (DPSTimedEntryProc)startEventsForTime, (void *)self,
            NX_MODALRESPTHRESHOLD);
    }
    return(self);
}

- stop
{
    if (dpsTimedEntryId != (DPSTimedEntry)-1) {
        [self stopPlay];
    }
    return(self);
}

- plus
{
    [self displayNextFrame:1];
    return(self);
}

- minus
{
    [self displayNextFrame:-1];
    return(self);
}

- stopPlay
{
    DPSRemoveTimedEntry(dpsTimedEntryId);
    dpsTimedEntryId = (DPSTimedEntry)-1;
    SNDUnreserve(SND_ACCESS_IN | SND_ACCESS_OUT | SND_ACCESS_DSP);
    return(self);
}

- (BOOL)displayNextFrame:(int)direction
{
    static NXPoint point = {0.0, 0.0};
    NXImage *frame;

    if (dpsTimedEntryId != (DPSTimedEntry)-1) {
        [self stopPlay];
    }
    currentFrameNo += direction;
    if (currentFrameNo >= (int)[frameList count]) {
        currentFrameNo = 0;
    } else if (currentFrameNo <= -1) {
        currentFrameNo = [frameList count] - 1;
    }
    frame = [frameList objectAt:currentFrameNo];
    [videoView lockFocus];
    [frame composite:NX_COPY toPoint:&point];
    [videoView unlockFocus];
    [[videoWindow window] flushWindow];
    return(YES);
}

- (BOOL)startEventsForTime:(double)realTime
{
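    /*
     * One playback tick.  The DPS timestamp argument is ignored; the wall
     * clock is read directly.  Sound fragments and frames that have fallen
     * behind real time are skipped, the next due ones are played and drawn,
     * and NO is returned once both lists are exhausted so the caller can
     * remove the timed entry.
     */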
    static NXPoint point = {0.0, 0.0};
    static double frameLagTime = IMAGE_FRAME_LAG_TIME;
    double realElapsedTime;
    double startFrameTime, startFragmentTime, elapsedTime;
    ITimedImage *frame;
    ITimedSound *fragment;
    static ITimedSound *previousFragment;
    struct timezone tzone;
    struct timeval realtime;

    gettimeofday(&realtime,&tzone);
    realTime = (double)realtime.tv_sec + (double)realtime.tv_usec/1.0E6;
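    /* First tick: remember the real start time and take the earlier of the
       first frame's and first fragment's timestamps as the media start. */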
    if (realStartTime == -MAXDOUBLE) {
        realStartTime = realTime;
        if ((frame = [frameList objectAt:currentFrameNo]) != nil) {
            startFrameTime = [frame time];
        } else {
            startFrameTime = MAXDOUBLE;
        }
        if ((fragment = [fragmentList objectAt:currentFragmentNo]) != nil) {
            startFragmentTime = [fragment time];
        } else {
            startFragmentTime = MAXDOUBLE;
        }
        if (startFrameTime < startFragmentTime) {
            startTime = startFrameTime;
        } else {
            startTime = startFragmentTime;
        }
//      framesPlayed = 0;
//      framesSkipped = 0;
        previousFragment = nil;
    }
    realElapsedTime = realTime - realStartTime;
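    /* Skip sound fragments that are already more than "tolerance" behind
       real time, then start the next fragment if it is due within two
       timer intervals. */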
    while ((fragment = [fragmentList objectAt:currentFragmentNo]) != nil &&
        (elapsedTime = [fragment time] - startTime) <
        realElapsedTime - tolerance) {
        fprintf(stderr, "fragment no = %d %e\n", currentFragmentNo, 
            elapsedTime - realElapsedTime);
        ++currentFragmentNo;
    }
    if (fragment != nil && elapsedTime - realElapsedTime < 2.0 * interval
        && fragment != previousFragment) {
        [previousFragment stop];
        [fragment play];
        previousFragment = fragment;
        fprintf(stderr, "playing sound fragment %d\n", currentFragmentNo);
    }
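    /* Same for video frames, except that frameLagTime (negative) allows a
       frame to be shown up to 0.15s after its nominal time before it is
       skipped; the wall clock is re-read on every iteration. */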
    while ((frame = [frameList objectAt:currentFrameNo]) != nil &&
        (elapsedTime = [frame time] - startTime) <
            realElapsedTime + frameLagTime - tolerance) {
//      ++framesSkipped;
        ++currentFrameNo;
        gettimeofday(&realtime,&tzone);
        realTime = (double)realtime.tv_sec + (double)realtime.tv_usec/1.0E6;
        realElapsedTime = realTime - realStartTime;
    }
    if (frame != nil && elapsedTime - (realElapsedTime + frameLagTime) 
        < 2.0 * interval) {
        [videoView lockFocus];
        [frame composite:NX_COPY toPoint:&point];
        [videoView unlockFocus];
        [[videoWindow window] flushWindow];
        NXPing();
//      ++framesPlayed;
        ++currentFrameNo;
    }
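    /* Both lists exhausted: stop any sound still playing and tell the
       caller to remove the timed entry. */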
    if (fragment == nil && frame == nil) {
        [previousFragment stop];
        return(NO);
    } else {
        return(YES);
    }
}
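
/*
 * C trampolines called by the Display PostScript timed-entry mechanism;
 * the "video" argument is the IVdrD instance handed to DPSAddTimedEntry().
 */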

void startEventsForTime(DPSTimedEntry teNo, double time, char * video)
{
    if ([(IVdrD *)video startEventsForTime:time] == NO) {
        [(IVdrD *)video stopPlay];
    }
}

void removeVideoViewFromSuperview(DPSTimedEntry teNo, double time,
    char * video)
{
    View *superview;

    superview = [((IVdrD *)video)->videoWindow superview];
    [((IVdrD *)video)->videoWindow removeFromSuperview];
    DPSRemoveTimedEntry(teNo);
    [superview display];
}

@end
