From 49a04b764aab5e936dd0682ea8b4c81f8a2862c2 Mon Sep 17 00:00:00 2001
From: "Adam D. Ruppe"
Date: Tue, 29 Dec 2020 11:32:20 -0500
Subject: [PATCH] some apng fixes

---
 apng.d          | 176 ++++++++++++++++++++++++++++++++++++++++++++++--
 color.d         |  34 +++++-----
 jsvar.d         |  29 ++++++++
 simpleaudio.d   |   2 +-
 simpledisplay.d |  13 +++-
 5 files changed, 228 insertions(+), 26 deletions(-)

diff --git a/apng.d b/apng.d
index e6c03de..14e07ad 100644
--- a/apng.d
+++ b/apng.d
@@ -1,8 +1,68 @@
 /++
 	Support for [https://wiki.mozilla.org/APNG_Specification|animated png] files.
+
+	History:
+		Originally written March 2019 with read support.
+
+		Render support added December 28, 2020.
 +/
 module arsd.apng;
 
+///
+unittest {
+	import arsd.simpledisplay;
+	import arsd.game;
+	import arsd.apng;
+
+	void main(string[] args) {
+		import std.file;
+		auto a = readApng(cast(ubyte[]) std.file.read(args[1]));
+
+		auto window = create2dWindow("Animated PNG viewer", a.header.width, a.header.height);
+
+		auto render = a.renderer();
+		OpenGlTexture[] frames;
+		int[] waits;
+		foreach(frame; a.frames) {
+			waits ~= render.nextFrame();
+			// this would be the raw data for the frame
+			//frames ~= new OpenGlTexture(frame.frameData.getAsTrueColorImage);
+			// or the current rendered version
+			frames ~= new OpenGlTexture(render.buffer);
+		}
+
+		int pos;
+		int currentWait;
+
+		void update() {
+			currentWait += waits[pos];
+			pos++;
+			if(pos == frames.length)
+				pos = 0;
+		}
+
+		window.redrawOpenGlScene = () {
+			glClear(GL_COLOR_BUFFER_BIT);
+			frames[pos].draw(0, 0);
+		};
+
+		auto tick = 50;
+		window.eventLoop(tick, delegate() {
+			currentWait -= tick;
+			auto updateNeeded = currentWait <= 0;
+			while(currentWait <= 0)
+				update();
+			if(updateNeeded)
+				window.redrawOpenGlSceneNow();
+		//},
+		//(KeyEvent ev) {
+			//if(ev.pressed)
+		});
+	}
+
+	version(Demo) main(["", "/home/me/test/apngexample.apng"]); // remove from docs
+}
+
 import arsd.png;
 
 // acTL
@@ -44,6 +104,7 @@ class ApngFrame {
 	ubyte[] compressedDatastream;
 	ubyte[] data;
+	MemoryImage frameData;
 
 	void populateData() {
 		if(data !is null)
 			return;
@@ -57,14 +118,24 @@ class ApngFrame {
 		auto height = frameControlChunk.height;
 
 		auto bytesPerLine = bytesPerLineOfPng(parent.header.depth, parent.header.type, width);
-		bytesPerLine--; // removing filter byte from this calculation since we handle separtely
+		bytesPerLine--; // removing filter byte from this calculation since we handle separately
 
 		size_t idataIdx;
 
 		ubyte[] idata;
-		idata.length = width * height * (parent.header.type == 3 ? 1 : 4);
+		MemoryImage img;
+		if(parent.header.type == 3) {
+			auto i = new IndexedImage(width, height);
+			img = i;
+			i.palette = parent.palette;
+			idata = i.data;
+		} else {
+			auto i = new TrueColorImage(width, height);
+			img = i;
+			idata = i.imageData.bytes;
+		}
 
-		ubyte[] previousLine;
+		immutable(ubyte)[] previousLine;
 		foreach(y; 0 .. height) {
 			auto filter = raw[0];
 			raw = raw[1 .. $];
@@ -72,15 +143,104 @@ class ApngFrame {
 			raw = raw[bytesPerLine .. $];
 
 			auto unfiltered = unfilter(filter, line, previousLine, bpp);
-			previousLine = line;
+			previousLine = unfiltered;
 			convertPngData(parent.header.type, parent.header.depth, unfiltered, width, idata, idataIdx);
 		}
 
 		this.data = idata;
+		this.frameData = img;
 	}
+}
 
-	//MemoryImage frameData;
+struct ApngRenderBuffer {
+	ApngAnimation animation;
+
+	public TrueColorImage buffer;
+	public int frameNumber;
+
+	private FrameControlChunk prevFcc;
+	private TrueColorImage[] convertedFrames;
+	private TrueColorImage previousFrame;
+
+	/++
+		Returns the number of milliseconds to wait until the next frame.
+	+/
+	int nextFrame() {
+		if(frameNumber == animation.frames.length) {
+			frameNumber = 0;
+			prevFcc = FrameControlChunk.init;
+		}
+
+		auto frame = animation.frames[frameNumber];
+		auto fcc = frame.frameControlChunk;
+		if(convertedFrames is null) {
+			convertedFrames = new TrueColorImage[](animation.frames.length);
+		}
+		if(convertedFrames[frameNumber] is null) {
+			frame.populateData();
+			convertedFrames[frameNumber] = frame.frameData.getAsTrueColorImage();
+		}
+
+		final switch(prevFcc.dispose_op) {
+			case APNG_DISPOSE_OP.NONE:
+				break;
+			case APNG_DISPOSE_OP.BACKGROUND:
+				// clear area to 0
+				foreach(y; prevFcc.y_offset .. prevFcc.y_offset + prevFcc.height)
+					buffer.imageData.bytes[
+						4 * (prevFcc.x_offset + y * buffer.width)
+							..
+						4 * (prevFcc.x_offset + prevFcc.width + y * buffer.width)
+					] = 0;
+				break;
+			case APNG_DISPOSE_OP.PREVIOUS:
+				// put the buffer back in
+
+				// this could prolly be more efficient, it only really cares about the prevFcc bounding box
+				buffer.imageData.bytes[] = previousFrame.imageData.bytes[];
+				break;
+		}
+
+		prevFcc = fcc;
+		// should copy the buffer at this point for a PREVIOUS case happening
+		if(fcc.dispose_op == APNG_DISPOSE_OP.PREVIOUS) {
+			// this could prolly be more efficient, it only really cares about the prevFcc bounding box
+			if(previousFrame is null){
+				previousFrame = buffer.clone();
+			} else {
+				previousFrame.imageData.bytes[] = buffer.imageData.bytes[];
+			}
+		}
+
+		size_t foff;
+		foreach(y; fcc.y_offset .. fcc.y_offset + fcc.height) {
+			final switch(fcc.blend_op) {
+				case APNG_BLEND_OP.SOURCE:
+					buffer.imageData.bytes[
+						4 * (fcc.x_offset + y * buffer.width)
+							..
+						4 * (fcc.x_offset + y * buffer.width + fcc.width)
+					] = convertedFrames[frameNumber].imageData.bytes[foff .. foff + fcc.width * 4];
+					foff += fcc.width * 4;
+					break;
+				case APNG_BLEND_OP.OVER:
+					foreach(x; fcc.x_offset .. fcc.x_offset + fcc.width) {
+						buffer.imageData.colors[y * buffer.width + x] =
+							alphaBlend(
+								convertedFrames[frameNumber].imageData.colors[foff],
+								buffer.imageData.colors[y * buffer.width + x]
+							);
+						foff++;
+					}
+					break;
+			}
+		}
+
+		frameNumber++;
+
+		return fcc.delay_num * 1000 / fcc.delay_den;
+	}
 }
 
 class ApngAnimation {
 	ApngHeader header;
@@ -90,8 +250,8 @@ class ApngAnimation {
 	ApngFrame[] frames;
 	// default image? tho i can just load it as a png for that too.
 
-	MemoryImage render() {
-		return null;
+	ApngRenderBuffer renderer() {
+		return ApngRenderBuffer(this, new TrueColorImage(header.width, header.height), 0);
+	}
 }
 
@@ -111,6 +271,7 @@ ApngAnimation readApng(in ubyte[] data) {
 	auto header = PngHeader.fromChunk(png.chunks[0]);
 
 	auto obj = new ApngAnimation();
+	obj.header = header;
 
 	if(header.type == 3) {
 		obj.palette = fetchPalette(png);
@@ -133,6 +294,7 @@ ApngAnimation readApng(in ubyte[] data) {
 				if(!seenFctl)
 					continue;
 
+				assert(frameNumber == 1); // we work on frame 0 but fcTL advances it
 				assert(obj.frames[0]);
 				obj.frames[0].compressedDatastream ~= chunk.payload;
diff --git a/color.d b/color.d
index 1fdcf4e..b545e18 100644
--- a/color.d
+++ b/color.d
@@ -413,7 +413,7 @@ struct Color {
 	 * WARNING! This function does blending in RGB space, and RGB space is not linear!
 	 */
 	public enum ColorBlendMixinStr(string colu32name, string destu32name) = "{
-		immutable uint a_tmp_ = (256-(255-(("~colu32name~")>>24)))&(-(1-(((255-(("~colu32name~")>>24))+1)>>8))); // to not loose bits, but 255 should become 0
+		immutable uint a_tmp_ = (256-(255-(("~colu32name~")>>24)))&(-(1-(((255-(("~colu32name~")>>24))+1)>>8))); // to not lose bits, but 255 should become 0
 		immutable uint dc_tmp_ = ("~destu32name~")&0xffffff;
 		immutable uint srb_tmp_ = (("~colu32name~")&0xff00ff);
 		immutable uint sg_tmp_ = (("~colu32name~")&0x00ff00);
@@ -428,11 +428,19 @@ struct Color {
 	/// Perform alpha-blending of `fore` to this color, return new color.
 	/// WARNING! This function does blending in RGB space, and RGB space is not linear!
 	Color alphaBlend (Color fore) const pure nothrow @trusted @nogc {
-		static if (__VERSION__ > 2067) pragma(inline, true);
-		Color res;
-		res.asUint = asUint;
-		mixin(ColorBlendMixinStr!("fore.asUint", "res.asUint"));
-		return res;
+		version(LittleEndian) {
+			static if (__VERSION__ > 2067) pragma(inline, true);
+			Color res;
+			res.asUint = asUint;
+			mixin(ColorBlendMixinStr!("fore.asUint", "res.asUint"));
+			return res;
+		} else {
+			alias foreground = fore;
+			alias background = this;
+			foreach(idx, ref part; foreground.components)
+				part = cast(ubyte) (part * foreground.a / 255 + background.components[idx] * (255 - foreground.a) / 255);
+			return foreground;
+		}
 	}
 }
 
@@ -1006,15 +1014,6 @@ class IndexedImage : MemoryImage {
 	TrueColorImage convertToTrueColor() const pure nothrow @trusted {
 		auto tci = new TrueColorImage(width, height);
 		foreach(i, b; data) {
-			/*
-			if(b >= palette.length) {
-				string fuckyou;
-				fuckyou ~= b + '0';
-				fuckyou ~= " ";
-				fuckyou ~= palette.length + '0';
-				assert(0, fuckyou);
-			}
-			*/
 			tci.imageData.colors[i] = palette[b];
 		}
 		return tci;
@@ -1411,6 +1410,11 @@ void removeTransparency(IndexedImage img, Color background)
 /// Perform alpha-blending of `fore` to this color, return new color.
 /// WARNING! This function does blending in RGB space, and RGB space is not linear!
 Color alphaBlend(Color foreground, Color background) pure nothrow @safe @nogc {
+	//if(foreground.a == 255)
+		//return foreground;
+	if(foreground.a == 0)
+		return background; // the other blend function always returns alpha 255, but if the foreground has nothing, we should keep the background the same so its antialiasing doesn't get smashed (assuming this is blending in like a png instead of on a framebuffer)
+
 	static if (__VERSION__ > 2067) pragma(inline, true);
 	return background.alphaBlend(foreground);
 }
diff --git a/jsvar.d b/jsvar.d
index 4f344a2..357863a 100644
--- a/jsvar.d
+++ b/jsvar.d
@@ -1429,6 +1429,7 @@ struct var {
 		return v;
 	}
 
+	///
 	@property static var emptyObject(var prototype) {
 		if(prototype._type == Type.Object)
 			return var.emptyObject(prototype._payload._object);
@@ -1499,22 +1500,26 @@ struct var {
 		return *v;
 	}
 
+	///
 	@property static var emptyArray() {
 		var v;
 		v._type = Type.Array;
 		return v;
 	}
 
+	///
 	static var fromJson(string json) {
 		auto decoded = parseJSON(json);
 		return var.fromJsonValue(decoded);
 	}
 
+	///
 	static var fromJsonFile(string filename) {
 		import std.file;
 		return var.fromJson(readText(filename));
 	}
 
+	///
 	static var fromJsonValue(JSONValue v) {
 		var ret;
 
@@ -1558,11 +1563,13 @@ struct var {
 		return ret;
 	}
 
+	///
 	string toJson() {
 		auto v = toJsonValue();
 		return toJSON(v);
 	}
 
+	///
 	JSONValue toJsonValue() {
 		JSONValue val;
 		final switch(payloadType()) {
@@ -2625,6 +2632,28 @@ class OverloadSet : PrototypeObject {
 	}
 }
 
+unittest {
+	struct A {
+		static:
+		string foo(var arg) { return "generic"; }
+		string foo(string s) { return "string"; }
+		string foo(int i) { return "int"; }
+		string foo(float i) { return "float"; }
+	}
+
+	auto os = new OverloadSet();
+	os.addOverloadsOf!(A.foo);
+	var g = var.emptyObject;
+	g.foo = os;
+
+	//g.foo()();
+	assert(g.foo()("for me") == "string");
+	//g.foo()("for me", "lol");
+	assert(g.foo()(1) == "int");
+	assert(g.foo()(5.4) == "float");
+	assert(g.foo()(new Object) == "generic");
+}
+
 bool appearsNumeric(string n) {
 	if(n.length == 0)
 		return false;
diff --git a/simpleaudio.d b/simpleaudio.d
index 5831a60..86ea5a2 100644
--- a/simpleaudio.d
+++ b/simpleaudio.d
@@ -56,7 +56,7 @@ enum BUFFER_SIZE_SHORT = BUFFER_SIZE_FRAMES * 2;
 
 /// A reasonable default volume for an individual sample. It doesn't need to be large; in fact it needs to not be large so mixing doesn't clip too much.
 enum DEFAULT_VOLUME = 20;
 
-version(Demo)
+version(Demo_simpleaudio)
 void main() {
 /+
diff --git a/simpledisplay.d b/simpledisplay.d
index ee60613..0e0c102 100644
--- a/simpledisplay.d
+++ b/simpledisplay.d
@@ -6638,15 +6638,13 @@ struct Pen {
 	On Windows, this means a device-independent bitmap. On X11, it is an XImage.
 
-	$(WARNING On X, do not create an Image in an application without an event loop. You may create images before running the event loop, but the event loop must run at some point before you try to actually draw the image to screen or before you exit your program.)
-
 	$(NOTE If you are writing platform-aware code and need to know low-level details, uou may check `if(Image.impl.xshmAvailable)` to see if MIT-SHM is used on X11 targets to draw `Image`s and `Sprite`s. Use `static if(UsingSimpledisplayX11)` to determine if you are compiling for an X11 target.)
 
 	Drawing an image to screen is not necessarily fast, but applying algorithms to draw to the image itself should be fast. An `Image` is also the first step in loading and displaying images loaded from files. If you intend to draw an image to screen several times, you will want to convert it into a [Sprite].
 
-	$(IMPORTANT `Image` may represent a scarce, shared resource that persists across process termination, and should be disposed of properly. On X11, it uses the MIT-SHM extension, if available, which uses shared memory handles with the X server, which is a long-lived process that holds onto them after your program terminates if you don't free it.
+	$(PITFALL `Image` may represent a scarce, shared resource that persists across process termination, and should be disposed of properly. On X11, it uses the MIT-SHM extension, if available, which uses shared memory handles with the X server, which is a long-lived process that holds onto them after your program terminates if you don't free it.
 
 	It is possible for your user's system to run out of these handles over time, forcing them to clean it up with extraordinary measures - their GUI is liable to stop working!
 
@@ -7861,6 +7859,11 @@ class Sprite : CapableOfBeingDrawnUpon {
 	///
 	final @property int height() { return _height; }
 
+	///
+	static Sprite fromMemoryImage(SimpleWindow win, MemoryImage img) {
+		return new Sprite(win, Image.fromMemoryImage(img));
+	}
+
 	private:
 	int _width;
 
@@ -11515,6 +11518,10 @@ mixin DynamicLoad!(XRender, "Xrender", 1, false, true) XRenderLibrary;
 			shminfo.readOnly = 0;
 			XShmAttach(display, &shminfo);
 			XDisplayConnection.registerImage(this);
+			// if I don't flush here there's a chance the dtor will run before the ctor and lead to a bad value X error. While this hurts the efficiency,
+			// it is local anyway so prolly better to keep it simple
+			XFlush(display);
 		} else {
 			if (forcexshm) throw new Exception("can't create XShm Image");
 			// This actually needs to be malloc to avoid a double free error when XDestroyImage is called
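
For reference, here is a minimal usage sketch (not part of the patch) showing the new ApngRenderBuffer driven through a plain SimpleWindow, with the Sprite.fromMemoryImage helper added above instead of OpenGL textures. The window title, tick rate, and the rebuild-a-Sprite-per-frame approach are illustrative assumptions, not anything prescribed by the library.

import arsd.simpledisplay;
import arsd.apng;

void main(string[] args) {
	import std.file;
	auto a = readApng(cast(ubyte[]) std.file.read(args[1]));

	auto window = new SimpleWindow(a.header.width, a.header.height, "APNG viewer");
	auto render = a.renderer();
	int wait = render.nextFrame(); // composes the first frame into render.buffer

	void show() {
		// A Sprite is a device-side copy, so rebuild it whenever the buffer changes.
		// (A long-running program would want to free or reuse the previous Sprite.)
		auto sprite = Sprite.fromMemoryImage(window, render.buffer);
		auto painter = window.draw();
		sprite.drawAt(painter, Point(0, 0));
	}

	show();

	enum tick = 50; // milliseconds between pulse timer callbacks
	window.eventLoop(tick, delegate() {
		wait -= tick;
		if(wait <= 0) {
			wait += render.nextFrame();
			show();
		}
	});
}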